I declare a struct and allocate memory.

Code:
typedef struct data {
    char date[days_of_data][9], symbol[9];
    float price[days_of_data];
    int vol[days_of_data];
    int index[days_of_data];
    float risk;
} _data;

daily_data = calloc(num_etfs + 1, sizeof(*daily_data));
for (i = 0; i < num_etfs + 1; i++)
    daily_data[i] = *(_data*)calloc(1, sizeof(_data));

Now I call a sort routine and check the result with printf statements afterward. However, when I print out the sorted data, it is not sorted.

Code:
sort(daily_data, num);

void sort(struct data *sorted, int num)
{
    int i, j = 0;
    int swapped = true;
    struct data tmp;

    while (swapped) {
        swapped = false;
        j++;
        for (i = 1; i < num - j; i++) {
            if (sorted[i].risk > sorted[i + 1].risk) {
                // printf("%f %f\n", sorted[i+1].risk, sorted[i].risk);
                tmp = sorted[i];
                // printf("%f %f\n", tmp.risk, sorted[i].risk);
                sorted[i] = sorted[i + 1];
                // printf("%f %f\n", sorted[i+1].risk, sorted[i].risk);
                sorted[i + 1] = tmp;
                // printf("%f %f\n", sorted[i+1].risk, sorted[i].risk);
                swapped = true;
            }
        }
    }

    for (i = 1; i < num_etfs; i++)
        printf("%s %f\n", sorted[i].symbol, sorted[i].risk);
};

Any help would be appreciated.
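A side note on the allocation above: the single calloc(num_etfs + 1, sizeof(*daily_data)) already returns a zeroed array of structs, so the per-element calloc in the loop isn't needed, and because only the struct value is copied while the returned pointer is discarded, each of those blocks is leaked (these are the leaks mentioned in the reply below). A minimal sketch of the allocation on its own, assuming the _data type from the post above; alloc_dailies() is a hypothetical name introduced here:

Code:
#include <stdlib.h>

/* Assumes the _data type declared in the post above. */
static _data *alloc_dailies(int num_etfs)
{
    /* One zeroed block holding num_etfs + 1 structs; nothing else to allocate. */
    _data *daily_data = calloc(num_etfs + 1, sizeof *daily_data);
    return daily_data;   /* NULL on failure; caller should check, and free() later */
}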
Cleaned up, refactored, leaks fixed, and made into an actual compilable test case.

"dougsort.c"
Code:
#include <stdio.h>
#include <stdlib.h>

#define SYMLEN 9

#define true 1
#define false 0

typedef struct data {
//  char date[days_of_data][9],
    char symbol[ SYMLEN ];
//  float price[days_of_data];
//  int vol[days_of_data];
//  int index[days_of_data];
    float risk;
} _data ;


void bubble_sort( _data *sorted, int num )
{
    int i, j = 0;
    int swapped = true;
    _data tmp;

    while (swapped)
    {
        swapped = false;
        j++;
        for (i = 1; i < num - j; i++)
        {
            if (sorted[i].risk > sorted[i + 1].risk)
            {
                // printf("%f %f\n", sorted[i+1].risk, sorted[i].risk);
                tmp = sorted[i];
                // printf("%f %f\n", tmp.risk, sorted[i].risk);
                sorted[i] = sorted[i + 1];
                // printf("%f %f\n", sorted[i+1].risk, sorted[i].risk);
                sorted[i + 1] = tmp;
                // printf("%f %f\n", sorted[i+1].risk, sorted[i].risk);
                swapped = true;
            }
        }
    }
};


// Chooses the algorithm for fake_risk()
#define RISK_DECREASING 0

static float fake_risk( void )
{
#if RISK_DECREASING
    // Incrementally decreasing values, 5000 down.
    static int ran = 5000;
    --ran;
#else
    // Pseudo-random values, [0..9999]
    // The range is 0 .. (2**32) - 1, i.e. all unsigned 32-bit integers.
    u_int32_t ran = arc4random();
#endif
    return (float) ( ran % 10000 );
}


// Fill with fake data.
// Each item gets a symbol of "q" followed by its original array index.
// For example, item 12 is named "q12".
// The risk member is set to a fake_risk() value.
// Item 0 is named "zero", with risk of 0.0f.
static void fill_dailies( _data * dailies, const int count )
{
    // Force item 0 to be different.
    strlcpy( dailies -> symbol, "zero", SYMLEN );
    dailies -> risk = 0.0f;

    int i;
    for ( i = 1; i < count; i++ )
    {
        snprintf( dailies[ i ].symbol, SYMLEN, "q%d", i );
        dailies[ i ].risk = fake_risk();
    }
}


static void show_dailies( _data * dailies, const int count )
{
    printf("-- %d items --\n", count );
    int i;
    for (i = 0; i < count; i++)
    {
        printf("%s %f\n", dailies[i].symbol, dailies[i].risk );
    }
}


// Calls abort(), which exits the process,
// so this function never returns.
static void fail( char * reason )
{
    fflush( stdout );
    fprintf( stderr, "failed: %s\n", reason );
    fflush( stderr );
    abort();  // never returns
}


#define num_etfs 20

int main( void )
{
    _data * daily_data;
    const int COUNT = num_etfs + 1;  // calculate once, reuse below

    daily_data = calloc( COUNT, sizeof(*daily_data) );
    if ( daily_data == NULL )
        fail( "can't allocate daily_data memory" );

    fill_dailies( daily_data, COUNT );
    show_dailies( daily_data, COUNT );

    bubble_sort( daily_data, COUNT );
    show_dailies( daily_data, COUNT );

    return 0;
}

Sample output:
Code:
-- 21 items --
zero 0.000000
q1 950.000000
q2 5067.000000
q3 8005.000000
q4 5588.000000
q5 3587.000000
q6 5478.000000
q7 6796.000000
q8 1081.000000
q9 3558.000000
q10 516.000000
q11 9510.000000
q12 3658.000000
q13 2194.000000
q14 7403.000000
q15 2438.000000
q16 917.000000
q17 3185.000000
q18 6142.000000
q19 5698.000000
q20 7839.000000
-- 21 items --
zero 0.000000
q10 516.000000
q16 917.000000
q1 950.000000
q8 1081.000000
q13 2194.000000
q15 2438.000000
q17 3185.000000
q9 3558.000000
q5 3587.000000
q12 3658.000000
q2 5067.000000
q6 5478.000000
q4 5588.000000
q19 5698.000000
q18 6142.000000
q7 6796.000000
q14 7403.000000
q20 7839.000000
q3 8005.000000
q11 9510.000000
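One way to confirm the result mechanically instead of eyeballing the printout is a small checker over the risk field. This is just a sketch; is_sorted() is a name introduced here, not part of the listing above, and it assumes the _data type plus the fact that the bubble sort above only orders items 1 through count - 1 (item 0 is left where it is):

Code:
// Hypothetical checker: returns 1 if risk is non-decreasing from items[1]
// through items[count - 1] (the range the bubble sort above actually orders),
// and 0 otherwise.
static int is_sorted( const _data *items, int count )
{
    int i;
    for ( i = 1; i + 1 < count; i++ )
        if ( items[i].risk > items[i + 1].risk )
            return 0;
    return 1;
}

// Possible use in main(), after bubble_sort():
//   if ( !is_sorted( daily_data, COUNT ) )
//       fail( "bubble_sort left the data unsorted" );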
I had to add string.h to avoid a warning, but otherwise it compiled and ran correctly. I wouldn't have expected anything else from you. Can you give me a hint why yours runs and mine doesn't? Thanks, Doug.
I have no idea. Not even a guess.

You didn't show any context or data from before the sort, so I can't comment on that. You didn't show the printf'ed output from after the sort, so I can't comment on that.

I used your sort code nearly verbatim. Compare the routines yourself with the 'diff' cmd and see what's different. The memory leaks I eliminated wouldn't have caused the sort to malfunction (I think).

The test case I provided shows the sort works correctly. Logically, if the problem isn't in the sort code, it must be somewhere else. Where else that might be, I can't begin to guess, since you didn't post any other code or info (like OS version, compiler version, etc.).

My only remaining suggestion is to isolate, change, or disable the parts of your code one at a time until something different happens. That's elementary debugging.
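To make that isolation concrete, one low-effort step is to dump the pointer, the count, and the risk values from the calling code immediately before and immediately after the sort call; that rules out printing a different array, or passing a different count, than the one actually sorted. dump_risks() below is a hypothetical helper, sketched against the _data type and the sort signature from the first post (it assumes <stdio.h> is included):

Code:
// Hypothetical helper: show exactly what the caller hands to, and gets back
// from, the sort routine.
static void dump_risks( const char *label, const _data *a, int num )
{
    int i;
    printf( "%s: array at %p, num = %d\n", label, (const void *) a, num );
    for ( i = 0; i < num; i++ )
        printf( "  [%d] %s %f\n", i, a[i].symbol, a[i].risk );
}

// Possible use at the call site:
//   dump_risks( "before sort", daily_data, num );
//   sort( daily_data, num );
//   dump_risks( "after sort", daily_data, num );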
Resolved? See here: http://forums.macrumors.com/threads/new-forum-feature-thread-prefixes.1042633/#post-11874380