Progress towards lexing situation

commit 8b8d880561
parent 020d09227c

@@ -38,15 +38,7 @@ fill_buffer_summary(Buffer_Summary *buffer, Editing_File *file, Working_Set *wor
 
         buffer->is_lexed = file->settings.tokens_exist;
         
-        if (file->state.token_array.tokens &&
-            file->state.tokens_complete &&
-            !file->state.still_lexing){
-            buffer->tokens_are_ready = 1;
-        }
-        else{
-            buffer->tokens_are_ready = 0;
-        }
-        
+        buffer->tokens_are_ready = (file->state.token_array.tokens && file->state.tokens_complete && !file->state.still_lexing);
         buffer->map_id = file->settings.base_map_id;
         buffer->unwrapped_lines = file->settings.unwrapped_lines;
         

@@ -111,7 +103,7 @@ get_file_from_identifier(System_Functions *system, Working_Set *working_set, Buf
     if (buffer.id){
         file = working_set_get_active_file(working_set, buffer.id);
     }
-    else if (buffer.name){
+    else if (buffer.name != 0){
         String name = make_string(buffer.name, buffer.name_len);
         file = working_set_contains_name(working_set, name);
     }

@@ -856,12 +848,7 @@ DOC_SEE(Buffer_Setting_ID)
                 }
                 else{
                     if (value){
-                        if (!file->settings.virtual_white){
-                            file_first_lex_parallel(system, models, file);
-                        }
-                        else{
-                            file_first_lex_serial(models, file);
-                        }
+                        file_first_lex(system, models, file);
                     }
                 }
             }break;

@@ -872,12 +859,7 @@ DOC_SEE(Buffer_Setting_ID)
                     if ((b8)value != file->settings.tokens_without_strings){
                         file_kill_tokens(system, &models->mem.heap, file);
                         file->settings.tokens_without_strings = (b8)value;
-                        if (!file->settings.virtual_white){
-                            file_first_lex_parallel(system, models, file);
-                        }
-                        else{
-                            file_first_lex_serial(models, file);
-                        }
+                        file_first_lex(system, models, file);
                     }
                 }
                 else{

@@ -893,12 +875,7 @@ DOC_SEE(Buffer_Setting_ID)
                     if (fixed_value != file->settings.parse_context_id){
                         file_kill_tokens(system, &models->mem.heap, file);
                         file->settings.parse_context_id = fixed_value;
-                        if (!file->settings.virtual_white){
-                            file_first_lex_parallel(system, models, file);
-                        }
-                        else{
-                            file_first_lex_serial(models, file);
-                        }
+                        file_first_lex(system, models, file);
                     }
                 }
                 else{

@@ -991,7 +968,7 @@ DOC_SEE(Buffer_Setting_ID)
                 if (value){
                     if (!file->settings.virtual_white){
                         if (!file->settings.tokens_exist){
-                            file_first_lex_serial(models, file);
+                            file_first_lex_serial(system, models, file);
                         }
                         if (!file->state.still_lexing){
                             file->settings.virtual_white = true;

@@ -1337,7 +1314,7 @@ DOC_SEE(Buffer_Identifier)
     if (file != 0){
         result = BufferKillResult_Unkillable;
         if (!file->settings.never_kill){
-            b32 needs_to_save = buffer_needs_save(file);
+            b32 needs_to_save = file_needs_save(file);
             if (!needs_to_save || (flags & BufferKill_AlwaysKill) != 0){
                 if (models->hook_end_file != 0){
                     models->hook_end_file(&models->app_links, file->id.id);

@@ -186,7 +186,9 @@ buffer_batch_edit_update_cursors(Cursor_With_Index *sorted_positions, i32 count,
 
 internal i32
 eol_convert_in(char *dest, char *src, i32 size){
-    i32 i = 0, j = 0, k = 0;
+    i32 i = 0;
+    i32 j = 0;
+    i32 k = 0;
     
     for (; j < size && src[j] != '\r'; ++j);
     memcpy(dest, src, j);

@@ -210,7 +212,9 @@ eol_convert_in(char *dest, char *src, i32 size){
 
 internal i32
 eol_in_place_convert_in(char *data, i32 size){
-    i32 i = 0, j = 0, k = 0;
+    i32 i = 0;
+    i32 j = 0;
+    i32 k = 0;
     
     for (; j < size && data[j] != '\r'; ++j);
     

@@ -235,7 +239,8 @@ eol_in_place_convert_in(char *data, i32 size){
 internal i32
 eol_convert_out(char *dest, i32 max, char *src, i32 size, i32 *size_out){
     i32 result = 1;
-    i32 i = 0, j = 0;
+    i32 i = 0;
+    i32 j = 0;
     
     for (; i < size; ++i, ++j){
         if (src[i] == '\n'){

@@ -506,6 +511,22 @@ buffer_edit_provide_memory(Gap_Buffer *buffer, void *new_data, i32 new_max){
 // High level buffer operations
 //
 
+internal String_Array
+buffer_get_chunks(Partition *part, Gap_Buffer *buffer){
+    String_Array result = {};
+    result.vals = push_array(part, String, 0);
+    if (buffer->size1 > 0){
+        String *s = push_array(part, String, 1);
+        *s = make_string(buffer->data, buffer->size1);
+    }
+    if (buffer->size2 > 0){
+        String *s = push_array(part, String, 1);
+        *s = make_string(buffer->data + buffer->size1 + buffer->gap_size, buffer->size2);
+    }
+    result.count = (i32)(push_array(part, String, 0) - result.vals);
+    return(result);
+}
+
 inline void
 buffer_stringify(Gap_Buffer *buffer, i32 start, i32 end, char *out){
     Gap_Buffer_Stream stream = {};

@@ -12,6 +12,11 @@
 #if !defined(FRED_BUFFER_H)
 #define FRED_BUFFER_H
 
+struct String_Array{
+    String *vals;
+    i32 count;
+};
+
 struct Cursor_With_Index{
     i32 pos;
     i32 index;

@@ -230,4 +235,4 @@ struct Buffer_Render_State{
 
 #endif
 
 // BOTTOM

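Note on the new buffer_get_chunks helper above: it pushes one String onto the supplied Partition for each occupied region of the gap buffer and derives the count from a second zero-length push_array call. A minimal caller sketch, assuming the String/Partition helpers already used in this commit; process_text is a hypothetical stand-in for whatever consumes the text, and buffer is an existing Gap_Buffer pointer:

// Sketch only: iterate every contiguous run of text in a Gap_Buffer.
// Two String slots suffice here because buffer_get_chunks pushes at most
// one String per occupied gap-buffer region.
String chunk_space[2];
Partition part = make_part(chunk_space, sizeof(chunk_space));
String_Array chunks = buffer_get_chunks(&part, buffer);
for (i32 i = 0; i < chunks.count; ++i){
    String chunk = chunks.vals[i];
    process_text(chunk.str, chunk.size);  // hypothetical consumer
}
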
4ed_edit.cpp (15 lines changed)

@@ -244,12 +244,7 @@ edit_single__inner(System_Functions *system, Models *models, Editing_File *file,
     
     // NOTE(allen): token fixing
     if (file->settings.tokens_exist){
-        if (!file->settings.virtual_white){
-            file_relex_parallel(system, models, file, start, end, shift_amount);
-        }
-        else{
-            file_relex_serial(models, file, start, end, shift_amount);
-        }
+        file_relex(system, models, file, start, end, shift_amount);
     }
     
     // NOTE(allen): meta data

@@ -365,13 +360,7 @@ edit_batch(System_Functions *system, Models *models, Editing_File *file,
                 // TODO(allen): Write a smart fast one here someday.
                 Buffer_Edit *first_edit = batch;
                 Buffer_Edit *last_edit = batch + batch_size - 1;
-                
-                if (!file->settings.virtual_white){
-                    file_relex_parallel(system, models, file, first_edit->start, last_edit->end, shift_total);
-                }
-                else{
-                    file_relex_serial(models, file, first_edit->start, last_edit->end, shift_total);
-                }
+                file_relex(system, models, file, first_edit->start, last_edit->end, shift_total);
             }
         }break;
         

4ed_file.cpp (11 lines changed)

@@ -137,7 +137,7 @@ edit_pos_get_new(Editing_File *file, i32 index){
 ////////////////////////////////
 
 inline b32
-buffer_needs_save(Editing_File *file){
+file_needs_save(Editing_File *file){
     b32 result = false;
     if (file->state.dirty == DirtyState_UnsavedChanges){
         result = true;

@@ -146,7 +146,7 @@ buffer_needs_save(Editing_File *file){
 }
 
 inline b32
-buffer_can_save(Editing_File *file){
+file_can_save(Editing_File *file){
     b32 result = false;
     if (file->state.dirty == DirtyState_UnsavedChanges ||
         file->state.dirty == DirtyState_UnloadedChanges){

@@ -554,12 +554,7 @@ file_create_from_string(System_Functions *system, Models *models, Editing_File *
     }
     
     if (file->settings.tokens_exist && file->state.token_array.tokens == 0){
-        if (!file->settings.virtual_white){
-            file_first_lex_parallel(system, models, file);
-        }
-        else{
-            file_first_lex_serial(models, file);
-        }
+        file_first_lex(system, models, file);
     }
     
     file->settings.is_initialized = true;

4ed_file_lex.cpp (324 lines changed)

@@ -9,8 +9,26 @@
 
 // TOP
 
-internal
-Job_Callback_Sig(job_full_lex){
+internal String_Array
+file_lex_chunks(Partition *part, Gap_Buffer *buffer){
+    String_Array result = {};
+    result.vals = push_array(part, String, 0);
+    buffer_get_chunks(part, buffer);
+    {
+        String *s = push_array(part, String, 1);
+        block_zero(s, sizeof(*s));
+    }
+    result.count = (i32)(push_array(part, String, 0) - result.vals);
+    return(result);
+}
+
+internal void
+file_lex_mark_new_tokens(System_Functions *system, Models *models, Editing_File *file){
+    // TODO(allen): Figure out what we want to do to mark these files.
+}
+
+internal void
+job_full_lex(System_Functions *system, Thread_Context *thread, Thread_Memory *memory, void *data[4]){
     Editing_File *file = (Editing_File*)data[0];
     Heap *heap = (Heap*)data[1];
     Models *models = (Models*)data[2];

@@ -45,30 +63,22 @@ Job_Callback_Sig(job_full_lex){
     
     Cpp_Lex_Data lex = cpp_lex_data_init(file->settings.tokens_without_strings, parse_context.kw_table, parse_context.pp_table);
     
-    // TODO(allen): deduplicate this against relex
-    char *chunks[3];
-    i32 chunk_sizes[3];
-    chunks[0] = buffer->data;
-    chunk_sizes[0] = buffer->size1;
-    chunks[1] = buffer->data + buffer->size1 + buffer->gap_size;
-    chunk_sizes[1] = buffer->size2;
-    chunks[2] = 0;
-    chunk_sizes[2] = 0;
+    String chunk_space[3];
+    Partition chunk_part = make_part(chunk_space, sizeof(chunk_space));
+    String_Array chunks = file_lex_chunks(&chunk_part, buffer);
     
     i32 chunk_index = 0;
     
     do{
-        char *chunk = chunks[chunk_index];
-        i32 chunk_size = chunk_sizes[chunk_index];
-        
-        i32 result =
-            cpp_lex_step(&lex, chunk, chunk_size, text_size, &tokens, 2048);
+        char *chunk = chunks.vals[chunk_index].str;
+        i32 chunk_size = chunks.vals[chunk_index].size;
         
+        i32 result = cpp_lex_step(&lex, chunk, chunk_size, text_size, &tokens, 2048);
         switch (result){
             case LexResult_NeedChunk:
             {
                 ++chunk_index;
-                Assert(chunk_index < ArrayCount(chunks));
+                Assert(chunk_index < chunks.count);
             }break;
             
             case LexResult_NeedTokenMemory:

@@ -128,18 +138,15 @@ Job_Callback_Sig(job_full_lex){
         file_token_array->tokens = file->state.swap_array.tokens;
         file->state.swap_array.tokens = 0;
     }
-    system->release_lock(FRAME_LOCK);
-    
-    // NOTE(allen): These are outside the locked section because I don't
-    // think getting these out of order will cause critical bugs, and I
-    // want to minimize what's done in locked sections.
     file->state.tokens_complete = true;
     file->state.still_lexing = false;
+    file_lex_mark_new_tokens(system, models, file);
+    system->release_lock(FRAME_LOCK);
 }
 
 internal void
 file_kill_tokens(System_Functions *system, Heap *heap, Editing_File *file){
-    file->settings.tokens_exist = 0;
+    file->settings.tokens_exist = false;
     if (file->state.still_lexing){
         system->cancel_job(BACKGROUND_THREADS, file->state.lex_job);
         if (file->state.swap_array.tokens){

@@ -159,7 +166,7 @@ file_first_lex_parallel(System_Functions *system, Models *models, Editing_File *
     Heap *heap = &models->mem.heap;
     file->settings.tokens_exist = true;
     
-    if (file->is_loading == 0 && file->state.still_lexing == 0){
+    if (!file->is_loading && !file->state.still_lexing){
         Assert(file->state.token_array.tokens == 0);
         
         file->state.tokens_complete = false;

@@ -175,7 +182,7 @@ file_first_lex_parallel(System_Functions *system, Models *models, Editing_File *
 }
 
 internal void
-file_first_lex_serial(Models *models, Editing_File *file){
+file_first_lex_serial(System_Functions *system, Models *models, Editing_File *file){
     Mem_Options *mem = &models->mem;
     Partition *part = &mem->part;
     Heap *heap = &mem->heap;

@@ -186,103 +193,96 @@ file_first_lex_serial(Models *models, Editing_File *file){
     if (file->is_loading == 0){
         Assert(file->state.token_array.tokens == 0);
         
-        {
-            Temp_Memory temp = begin_temp_memory(part);
-            
-            Parse_Context parse_context = parse_context_get(&models->parse_context_memory, file->settings.parse_context_id, partition_current(part), partition_remaining(part));
-            Assert(parse_context.valid);
-            push_array(part, char, (i32)parse_context.memory_size);
-            
-            Gap_Buffer *buffer = &file->state.buffer;
-            i32 text_size = buffer_size(buffer);
-            
-            i32 mem_size = partition_remaining(part);
-            
-            Cpp_Token_Array new_tokens;
-            new_tokens.max_count = mem_size/sizeof(Cpp_Token);
-            new_tokens.count = 0;
-            new_tokens.tokens = push_array(part, Cpp_Token, new_tokens.max_count);
-            
-            b32 still_lexing = true;
-            
-            Cpp_Lex_Data lex = cpp_lex_data_init(file->settings.tokens_without_strings, parse_context.kw_table, parse_context.pp_table);
-            
-            // TODO(allen): deduplicate this against relex
-            char *chunks[3];
-            i32 chunk_sizes[3];
-            chunks[0] = buffer->data;
-            chunk_sizes[0] = buffer->size1;
-            chunks[1] = buffer->data + buffer->size1 + buffer->gap_size;
-            chunk_sizes[1] = buffer->size2;
-            chunks[2] = 0;
-            chunk_sizes[2] = 0;
-            
-            i32 chunk_index = 0;
-            
-            Cpp_Token_Array *swap_array = &file->state.swap_array;
-            
-            do{
-                char *chunk = chunks[chunk_index];
-                i32 chunk_size = chunk_sizes[chunk_index];
-                
-                i32 result = cpp_lex_step(&lex, chunk, chunk_size, text_size, &new_tokens, NO_OUT_LIMIT);
-                
-                switch (result){
-                    case LexResult_NeedChunk:
-                    {
-                        ++chunk_index;
-                        Assert(chunk_index < ArrayCount(chunks));
-                    }break;
-                    
-                    case LexResult_Finished:
-                    case LexResult_NeedTokenMemory:
-                    {
-                        u32 new_max = l_round_up_u32(swap_array->count + new_tokens.count + 1, KB(1));
-                        if (swap_array->tokens == 0){
-                            swap_array->tokens = heap_array(heap, Cpp_Token, new_max);
-                        }
-                        else{
-                            u32 old_count = swap_array->count;
-                            Cpp_Token *new_token_mem = heap_array(heap, Cpp_Token, new_max);
-                            memcpy(new_token_mem, swap_array->tokens, sizeof(*new_token_mem)*old_count);
-                            heap_free(heap, swap_array->tokens);
-                            swap_array->tokens = new_token_mem;
-                        }
-                        swap_array->max_count = new_max;
-                        
-                        Assert(swap_array->count + new_tokens.count <= swap_array->max_count);
-                        memcpy(swap_array->tokens + swap_array->count, new_tokens.tokens, new_tokens.count*sizeof(Cpp_Token));
-                        swap_array->count += new_tokens.count;
-                        new_tokens.count = 0;
-                        
-                        if (result == LexResult_Finished){
-                            still_lexing = false;
-                        }
-                    }break;
-                    
-                    case LexResult_HitTokenLimit: InvalidCodePath;
-                }
-            } while (still_lexing);
-            
-            Cpp_Token_Array *token_array = &file->state.token_array;
-            token_array->count = swap_array->count;
-            token_array->max_count = swap_array->max_count;
-            if (token_array->tokens != 0){
-                heap_free(heap, token_array->tokens);
-            }
-            token_array->tokens = swap_array->tokens;
-            
-            swap_array->tokens = 0;
-            swap_array->count = 0;
-            swap_array->max_count = 0;
-            
-            file->state.tokens_complete = true;
-            file->state.still_lexing = false;
-            
-            end_temp_memory(temp);
-        }
+        Temp_Memory temp = begin_temp_memory(part);
+        
+        Parse_Context parse_context = parse_context_get(&models->parse_context_memory, file->settings.parse_context_id, partition_current(part), partition_remaining(part));
+        Assert(parse_context.valid);
+        push_array(part, char, (i32)parse_context.memory_size);
+        
+        Gap_Buffer *buffer = &file->state.buffer;
+        i32 text_size = buffer_size(buffer);
+        
+        i32 mem_size = partition_remaining(part);
+        
+        Cpp_Token_Array new_tokens;
+        new_tokens.max_count = mem_size/sizeof(Cpp_Token);
+        new_tokens.count = 0;
+        new_tokens.tokens = push_array(part, Cpp_Token, new_tokens.max_count);
+        
+        b32 still_lexing = true;
+        
+        Cpp_Lex_Data lex = cpp_lex_data_init(file->settings.tokens_without_strings, parse_context.kw_table, parse_context.pp_table);
+        
+        String chunk_space[3];
+        Partition chunk_part = make_part(chunk_space, sizeof(chunk_space));
+        String_Array chunks = file_lex_chunks(&chunk_part, buffer);
+        
+        i32 chunk_index = 0;
+        
+        Cpp_Token_Array *swap_array = &file->state.swap_array;
+        
+        do{
+            char *chunk = chunks.vals[chunk_index].str;
+            i32 chunk_size = chunks.vals[chunk_index].size;
+            
+            i32 result = cpp_lex_step(&lex, chunk, chunk_size, text_size, &new_tokens, NO_OUT_LIMIT);
+            
+            switch (result){
+                case LexResult_NeedChunk:
+                {
+                    ++chunk_index;
+                    Assert(chunk_index < chunks.count);
+                }break;
+                
+                case LexResult_Finished:
+                case LexResult_NeedTokenMemory:
+                {
+                    u32 new_max = l_round_up_u32(swap_array->count + new_tokens.count + 1, KB(1));
+                    if (swap_array->tokens == 0){
+                        swap_array->tokens = heap_array(heap, Cpp_Token, new_max);
+                    }
+                    else{
+                        u32 old_count = swap_array->count;
+                        Cpp_Token *new_token_mem = heap_array(heap, Cpp_Token, new_max);
+                        memcpy(new_token_mem, swap_array->tokens, sizeof(*new_token_mem)*old_count);
+                        heap_free(heap, swap_array->tokens);
+                        swap_array->tokens = new_token_mem;
+                    }
+                    swap_array->max_count = new_max;
+                    
+                    Assert(swap_array->count + new_tokens.count <= swap_array->max_count);
+                    memcpy(swap_array->tokens + swap_array->count, new_tokens.tokens, new_tokens.count*sizeof(Cpp_Token));
+                    swap_array->count += new_tokens.count;
+                    new_tokens.count = 0;
+                    
+                    if (result == LexResult_Finished){
+                        still_lexing = false;
+                    }
+                }break;
+                
+                case LexResult_HitTokenLimit: InvalidCodePath;
+            }
+        } while (still_lexing);
+        
+        Cpp_Token_Array *token_array = &file->state.token_array;
+        token_array->count = swap_array->count;
+        token_array->max_count = swap_array->max_count;
+        if (token_array->tokens != 0){
+            heap_free(heap, token_array->tokens);
+        }
+        token_array->tokens = swap_array->tokens;
+        
+        swap_array->tokens = 0;
+        swap_array->count = 0;
+        swap_array->max_count = 0;
         
         file->state.tokens_complete = true;
+        file->state.still_lexing = false;
+        
+        end_temp_memory(temp);
+        
+        file->state.tokens_complete = true;
+        file_lex_mark_new_tokens(system, models, file);
     }
 }
 

@@ -294,7 +294,7 @@ file_relex_parallel(System_Functions *system, Models *models, Editing_File *file
     
     if (file->state.token_array.tokens == 0){
        file_first_lex_parallel(system, models, file);
-        return(false);
+        return(true);
     }
     
     b32 result = true;

@@ -323,30 +323,22 @@ file_relex_parallel(System_Functions *system, Models *models, Editing_File *file
         
         Cpp_Relex_Data state = cpp_relex_init(array, start_i, end_i, shift_amount, file->settings.tokens_without_strings, parse_context.kw_table, parse_context.pp_table);
         
-        char *chunks[3];
-        i32 chunk_sizes[3];
-        
-        chunks[0] = buffer->data;
-        chunk_sizes[0] = buffer->size1;
-        
-        chunks[1] = buffer->data + buffer->size1 + buffer->gap_size;
-        chunk_sizes[1] = buffer->size2;
-        
-        chunks[2] = 0;
-        chunk_sizes[2] = 0;
+        String chunk_space[3];
+        Partition chunk_part = make_part(chunk_space, sizeof(chunk_space));
+        String_Array chunks = file_lex_chunks(&chunk_part, buffer);
         
         i32 chunk_index = 0;
-        char *chunk = chunks[chunk_index];
-        i32 chunk_size = chunk_sizes[chunk_index];
+        char *chunk = chunks.vals[chunk_index].str;
+        i32 chunk_size = chunks.vals[chunk_index].size;
         
-        while (!cpp_relex_is_start_chunk(&state, chunk, chunk_size)){
+        for (;!cpp_relex_is_start_chunk(&state, chunk, chunk_size);){
             ++chunk_index;
-            Assert(chunk_index < ArrayCount(chunks));
-            chunk = chunks[chunk_index];
-            chunk_size = chunk_sizes[chunk_index];
+            Assert(chunk_index < chunks.count);
+            chunk = chunks.vals[chunk_index].str;
+            chunk_size = chunks.vals[chunk_index].size;
         }
         
-        for(;;){
+        for (;;){
             Cpp_Lex_Result lex_result =
                 cpp_relex_step(&state, chunk, chunk_size, size, array, &relex_array);
             

@@ -354,9 +346,9 @@ file_relex_parallel(System_Functions *system, Models *models, Editing_File *file
                 case LexResult_NeedChunk:
                 {
                     ++chunk_index;
-                    Assert(chunk_index < ArrayCount(chunks));
-                    chunk = chunks[chunk_index];
-                    chunk_size = chunk_sizes[chunk_index];
+                    Assert(chunk_index < chunks.count);
+                    chunk = chunks.vals[chunk_index].str;
+                    chunk_size = chunks.vals[chunk_index].size;
                 }break;
                 
                 case LexResult_NeedTokenMemory:

@@ -381,6 +373,7 @@ file_relex_parallel(System_Functions *system, Models *models, Editing_File *file
             }
             
             cpp_relex_complete(&state, array, &relex_array);
+            file_lex_mark_new_tokens(system, models, file);
         }
         else{
             cpp_relex_abort(&state, array);

@@ -425,14 +418,14 @@ file_relex_parallel(System_Functions *system, Models *models, Editing_File *file
 }
 
 internal b32
-file_relex_serial(Models *models, Editing_File *file, i32 start_i, i32 end_i, i32 shift_amount){
+file_relex_serial(System_Functions *system, Models *models, Editing_File *file, i32 start_i, i32 end_i, i32 shift_amount){
     Mem_Options *mem = &models->mem;
     Heap *heap = &mem->heap;
     Partition *part = &mem->part;
     
     if (file->state.token_array.tokens == 0){
-        file_first_lex_serial(models, file);
-        return(1);
+        file_first_lex_serial(system, models, file);
+        return(true);
     }
     
     Assert(!file->state.still_lexing);

@@ -454,27 +447,19 @@ file_relex_serial(Models *models, Editing_File *file, i32 start_i, i32 end_i, i3
     
     Cpp_Relex_Data state = cpp_relex_init(array, start_i, end_i, shift_amount, file->settings.tokens_without_strings, parse_context.kw_table, parse_context.pp_table);
     
-    char *chunks[3];
-    i32 chunk_sizes[3];
-    
-    chunks[0] = buffer->data;
-    chunk_sizes[0] = buffer->size1;
-    
-    chunks[1] = buffer->data + buffer->size1 + buffer->gap_size;
-    chunk_sizes[1] = buffer->size2;
-    
-    chunks[2] = 0;
-    chunk_sizes[2] = 0;
+    String chunk_space[3];
+    Partition chunk_part = make_part(chunk_space, sizeof(chunk_space));
+    String_Array chunks = file_lex_chunks(&chunk_part, buffer);
     
     i32 chunk_index = 0;
-    char *chunk = chunks[chunk_index];
-    i32 chunk_size = chunk_sizes[chunk_index];
+    char *chunk = chunks.vals[chunk_index].str;
+    i32 chunk_size = chunks.vals[chunk_index].size;
     
-    while (!cpp_relex_is_start_chunk(&state, chunk, chunk_size)){
+    for (;!cpp_relex_is_start_chunk(&state, chunk, chunk_size);){
         ++chunk_index;
-        Assert(chunk_index < ArrayCount(chunks));
-        chunk = chunks[chunk_index];
-        chunk_size = chunk_sizes[chunk_index];
+        Assert(chunk_index < chunks.count);
+        chunk = chunks.vals[chunk_index].str;
+        chunk_size = chunks.vals[chunk_index].size;
     }
     
     for(;;){

@@ -484,9 +469,9 @@ file_relex_serial(Models *models, Editing_File *file, i32 start_i, i32 end_i, i3
             case LexResult_NeedChunk:
             {
                 ++chunk_index;
-                Assert(chunk_index < ArrayCount(chunks));
-                chunk = chunks[chunk_index];
-                chunk_size = chunk_sizes[chunk_index];
+                Assert(chunk_index < chunks.count);
+                chunk = chunks.vals[chunk_index].str;
+                chunk_size = chunks.vals[chunk_index].size;
             }break;
             
             case LexResult_NeedTokenMemory: InvalidCodePath;

@@ -507,11 +492,32 @@ file_relex_serial(Models *models, Editing_File *file, i32 start_i, i32 end_i, i3
     }
     
     cpp_relex_complete(&state, array, &relex_array);
+    file_lex_mark_new_tokens(system, models, file);
    
     end_temp_memory(temp);
     
     return(1);
 }
 
+internal void
+file_first_lex(System_Functions *system, Models *models, Editing_File *file){
+    if (!file->settings.virtual_white){
+        file_first_lex_parallel(system, models, file);
+    }
+    else{
+        file_first_lex_serial(system, models, file);
+    }
+}
+
+internal void
+file_relex(System_Functions *system, Models *models, Editing_File *file, i32 start, i32 end, i32 shift_amount){
+    if (!file->settings.virtual_white){
+        file_relex_parallel(system, models, file, start, end, shift_amount);
+    }
+    else{
+        file_relex_serial(system, models, file, start, end, shift_amount);
+    }
+}
+
 // BOTTOM

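For orientation, the repeated pattern above replaces the old parallel chunks[3]/chunk_sizes[3] arrays with a String_Array built by file_lex_chunks, which calls buffer_get_chunks and then appends one zeroed String as the end-of-input chunk. A rough consumer-side sketch, assuming only types introduced in this commit; consume_chunk is a hypothetical stand-in for cpp_lex_step/cpp_relex_step, and buffer is an existing Gap_Buffer pointer:

// Sketch only: feed gap-buffer chunks to a lexer until it stops asking for
// more input. The zeroed final String from file_lex_chunks plays the role
// of the old chunks[2] = 0 / chunk_sizes[2] = 0 terminator.
String chunk_space[3];
Partition chunk_part = make_part(chunk_space, sizeof(chunk_space));
String_Array chunks = file_lex_chunks(&chunk_part, buffer);

for (i32 chunk_index = 0; chunk_index < chunks.count; ++chunk_index){
    char *chunk = chunks.vals[chunk_index].str;   // 0 for the final sentinel chunk
    i32 chunk_size = chunks.vals[chunk_index].size;
    if (!consume_chunk(chunk, chunk_size)){       // hypothetical; false once lexing finishes
        break;
    }
}
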
@@ -147,7 +147,8 @@ System_Functions *system,                \
 Thread_Context *thread,                  \
 Thread_Memory *memory,                   \
 void *data[4])
-typedef Job_Callback_Sig(Job_Callback);
+
+typedef void Job_Callback(System_Functions *system, Thread_Context *thread, Thread_Memory *memory, void *data[4]);
 
 struct Job_Data{
     Job_Callback *callback;

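The macro-based Job_Callback_Sig typedef is replaced by a spelled-out function typedef, which is what lets job_full_lex above be declared as an ordinary internal void function. A minimal sketch of a callback written against the new typedef; my_job and queue_my_job_example are illustrative names, not part of the commit:

// Sketch only: a callback matching the new Job_Callback typedef.
internal void
my_job(System_Functions *system, Thread_Context *thread, Thread_Memory *memory, void *data[4]){
    // Unpack queued parameters in the same style as job_full_lex.
    Editing_File *file = (Editing_File*)data[0];
    (void)file; (void)system; (void)thread; (void)memory;
}

internal void
queue_my_job_example(void){
    Job_Data job = {};
    job.callback = my_job;   // Job_Data keeps a Job_Callback* member
    (void)job;               // real code would hand this to the job system
}
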
@@ -1,5 +1,5 @@
 1
 0
-122
+123
 
 