#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <ctype.h>
#include <limits.h>
#include <unistd.h>
#include <map>
#include <set>
#include <stack>

#ifndef PATH_MAX
/** The maximum length of paths, in case the headers do not provide it. */
#	define PATH_MAX 260
#endif

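/** Simple string comparator, so the sets and maps below can be keyed on C strings. */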
struct StringCompare {
	/** Compare two strings lexicographically. */
	bool operator () (const char *a, const char *b) const
	{
		return strcmp(a, b) < 0;
	}
};

/** A set of C-style strings. */
typedef std::set<const char*, StringCompare> StringSet;
/** A map of C-style strings to sets of C-style strings. */
typedef std::map<const char*, StringSet*, StringCompare> StringMap;
/** A single entry of a StringMap. */
typedef std::pair<const char*, StringSet*> StringMapItem;

/** The include directories given on the command line. */
static StringSet _include_dirs;
/** The object files with the headers they depend on. */
static StringMap _files;
/** The headers with the headers they depend on. */
static StringMap _headers;
/** The defines given on the command line. */
static StringSet _defines;

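/** Helper class to read a file character by character and keep track of its directory. */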
class File {
public:
	/**
	 * Open the given file for reading and remember the directory it is in.
	 * @param filename the file to read.
	 */
	File(const char *filename) : filename(filename)
	{
		this->fp = fopen(filename, "r");
		if (this->fp == NULL) {
			fprintf(stdout, "Could not open %s for reading\n", filename);
			exit(1);
		}
		this->dirname = strdup(filename);
		char *last = strrchr(this->dirname, '/');
		if (last != NULL) {
			*last = '\0';
		} else {
			*this->dirname = '\0';
		}
	}

	/** Free all allocated resources. */
	~File()
	{
		fclose(this->fp);
		free(this->dirname);
	}

	/**
	 * Read a single character from the file.
	 * @return the read character, or '\0' when the end of the file is reached.
	 */
	char GetChar() const
	{
		int c = fgetc(this->fp);
		return (c == EOF) ? '\0' : c;
	}

	/**
	 * Get the directory the file is in.
	 * @return the directory name.
	 */
	const char *GetDirname() const
	{
		return this->dirname;
	}

private:
	FILE *fp;             ///< The file being read.
	char *dirname;        ///< The directory of the file.
	const char *filename; ///< The name of the file.
};

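/** The tokens the lexer can produce. */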
enum Token {
	TOKEN_UNKNOWN,    ///< Unrecognised token
	TOKEN_END,        ///< End of the document
	TOKEN_EOL,        ///< End of the line
	TOKEN_SHARP,      ///< '#' character, starts a preprocessor directive
	TOKEN_LOCAL,      ///< A "local" include
	TOKEN_GLOBAL,     ///< A <global> include
	TOKEN_IDENTIFIER, ///< An identifier
	TOKEN_DEFINE,     ///< (#)define
	TOKEN_IF,         ///< (#)if
	TOKEN_IFDEF,      ///< (#)ifdef
	TOKEN_IFNDEF,     ///< (#)ifndef
	TOKEN_ELIF,       ///< (#)elif
	TOKEN_ELSE,       ///< (#)else
	TOKEN_ENDIF,      ///< (#)endif
	TOKEN_UNDEF,      ///< (#)undef
	TOKEN_OR,         ///< '||' within an #if expression
	TOKEN_AND,        ///< '&&' within an #if expression
	TOKEN_DEFINED,    ///< 'defined' within an #if expression
	TOKEN_OPEN,       ///< '(' within an #if expression
	TOKEN_CLOSE,      ///< ')' within an #if expression
	TOKEN_NOT,        ///< '!' within an #if expression
	TOKEN_ZERO,       ///< A numeric literal that evaluates to zero
	TOKEN_INCLUDE,    ///< (#)include
};

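/** Mapping of keyword strings to their tokens. */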
typedef std::map<const char*, Token, StringCompare> KeywordList;

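/** A simple lexer that splits a source file into the tokens relevant for dependency scanning. */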
class Lexer {
public:
	/**
	 * Create the lexer and fill the keyword list.
	 * @param file the file to read from.
	 */
	Lexer(const File *file) : file(file), current_char('\0'), string(NULL), token(TOKEN_UNKNOWN)
	{
		this->keywords["define"] = TOKEN_DEFINE;
		this->keywords["defined"] = TOKEN_DEFINED;
		this->keywords["if"] = TOKEN_IF;
		this->keywords["ifdef"] = TOKEN_IFDEF;
		this->keywords["ifndef"] = TOKEN_IFNDEF;
		this->keywords["include"] = TOKEN_INCLUDE;
		this->keywords["elif"] = TOKEN_ELIF;
		this->keywords["else"] = TOKEN_ELSE;
		this->keywords["endif"] = TOKEN_ENDIF;
		this->keywords["undef"] = TOKEN_UNDEF;

		/* Read the first character into the lexer. */
		this->Next();

		/* Allocate the scratch buffer for identifiers and strings. */
		this->buf_len = 32;
		this->buf = (char*)malloc(sizeof(*this->buf) * this->buf_len);
	}

	/** Free all allocated resources. */
	~Lexer()
	{
		free(this->string);
		free(this->buf);
	}

	/** Read the next character into 'current_char'. */
	void Next()
	{
		this->current_char = this->file->GetChar();
	}

	/**
	 * Get the last token that was lexed.
	 * @return the token.
	 */
	Token GetToken() const
	{
		return this->token;
	}

	/**
	 * Get the string of the last lexed token, if it has one.
	 * @return the string, or NULL.
	 */
	const char *GetString() const
	{
		return this->string;
	}

	/** Perform the lexing of a single token. */
	void Lex()
	{
		for (;;) {
			free(this->string);
			this->string = NULL;
			this->token = TOKEN_UNKNOWN;

			switch (this->current_char) {
				/* '\0' means end-of-file. */
				case '\0': this->token = TOKEN_END; return;

				/* Skip whitespace. */
				case '\t': this->Next(); break;
				case '\r': this->Next(); break;
				case ' ': this->Next(); break;

				case '\\':
					this->Next();
					if (this->current_char == '\n') this->Next();
					break;

				case '\n':
					this->token = TOKEN_EOL;
					this->Next();
					return;

				case '#':
					this->token = TOKEN_SHARP;
					this->Next();
					return;

				case '"':
					this->ReadString('"', TOKEN_LOCAL);
					this->Next();
					return;

				case '<':
					this->ReadString('>', TOKEN_GLOBAL);
					this->Next();
					return;

				case '&':
					this->Next();
					if (this->current_char == '&') {
						this->Next();
						this->token = TOKEN_AND;
						return;
					}
					break;

				case '|':
					this->Next();
					if (this->current_char == '|') {
						this->Next();
						this->token = TOKEN_OR;
						return;
					}
					break;

				case '(':
					this->Next();
					this->token = TOKEN_OPEN;
					return;

				case ')':
					this->Next();
					this->token = TOKEN_CLOSE;
					return;

				case '!':
					this->Next();
					if (this->current_char != '=') {
						this->token = TOKEN_NOT;
						return;
					}
					break;

				/* Possible start of a comment. */
				case '/':
					this->Next();
					switch (this->current_char) {
						case '*': {
							this->Next();
							char previous_char = '\0';
							while ((this->current_char != '/' || previous_char != '*') && this->current_char != '\0') {
								previous_char = this->current_char;
								this->Next();
							}
							this->Next();
							break;
						}
						case '/': while (this->current_char != '\n' && this->current_char != '\0') this->Next(); break;
						default: break;
					}
					break;

				default:
					if (isalpha(this->current_char) || this->current_char == '_') {
						/* Identifier or keyword. */
						this->ReadIdentifier();
						return;
					}
					if (isdigit(this->current_char)) {
						bool zero = this->current_char == '0';
						this->Next();
						if (this->current_char == 'x' || this->current_char == 'X') this->Next();
						while (isdigit(this->current_char) || this->current_char == '.' || (this->current_char >= 'a' && this->current_char <= 'f') || (this->current_char >= 'A' && this->current_char <= 'F')) {
							zero &= this->current_char == '0';
							this->Next();
						}
						if (zero) this->token = TOKEN_ZERO;
						return;
					}
					this->Next();
					break;
			}
		}
	}

private:
	/**
	 * Find whether the given name is a keyword.
	 * @param name the identifier to look up.
	 * @return the token of the keyword, or TOKEN_IDENTIFIER if it is not a keyword.
	 */
	Token FindKeyword(const char *name) const
	{
		KeywordList::const_iterator it = this->keywords.find(name);
		if (it == this->keywords.end()) return TOKEN_IDENTIFIER;
		return (*it).second;
	}

	/** Read an identifier into the buffer and determine its token. */
	void ReadIdentifier()
	{
		size_t count = 0;

		/* Read the rest of the identifier. */
		do {
			this->buf[count++] = this->current_char;
			this->Next();

			if (count >= this->buf_len) {
				/* Scale the buffer if required. */
				this->buf_len *= 2;
				this->buf = (char *)realloc(this->buf, sizeof(*this->buf) * this->buf_len);
			}
		} while ((isalpha(this->current_char) || this->current_char == '_' || isdigit(this->current_char)));
		this->buf[count] = '\0';

		free(this->string);
		this->string = strdup(this->buf);
		this->token = FindKeyword(this->string);
	}

	/**
	 * Read a string up to a given end character into the buffer.
	 * @param end   the character that terminates the string.
	 * @param token the token to return when the string has been read.
	 */
	void ReadString(char end, Token token)
	{
		size_t count = 0;
		this->Next();
		while (this->current_char != end && this->current_char != ')' && this->current_char != '\n' && this->current_char != '\0') {
			this->buf[count++] = this->current_char;
			this->Next();

			if (count >= this->buf_len) {
				/* Scale the buffer if required. */
				this->buf_len *= 2;
				this->buf = (char *)realloc(this->buf, sizeof(*this->buf) * this->buf_len);
			}
		}
		this->buf[count] = '\0';
		free(this->string);
		this->string = strdup(this->buf);
		this->token = token;
	}

	const File *file;     ///< The file to read from.
	char current_char;    ///< The last read character.
	char *string;         ///< The string of the last read token, if any.
	Token token;          ///< The last read token.
	char *buf;            ///< Scratch buffer for identifiers and strings.
	size_t buf_len;       ///< Allocated length of the scratch buffer.
	KeywordList keywords; ///< The keywords this lexer knows about.
};

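/**
 * Generate a readable path for the given include.
 * For local includes the name is tried as-is and then relative to the directory
 * of the including file; in all cases the configured include directories are
 * tried as well. Leading "../" components are collapsed along the way.
 * @param dirname  the directory of the including file.
 * @param filename the name used in the #include directive.
 * @param local    whether the include was a "local" one (quotes) rather than a <global> one.
 * @return a newly allocated path to a readable file, or NULL when no such file exists.
 */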
const char *GeneratePath(const char *dirname, const char *filename, bool local)
{
	if (local) {
		if (access(filename, R_OK) == 0) return strdup(filename);

		char path[PATH_MAX];
		strcpy(path, dirname);
		const char *p = filename;

		/* Strip leading ".."s from the filename, dropping one path component for each. */
		while (*p == '.') {
			if (*(++p) == '.') {
				char *s = strrchr(path, '/');
				if (s != NULL) *s = '\0';
				p += 2;
			}
		}
		strcat(path, "/");
		strcat(path, p);

		if (access(path, R_OK) == 0) return strdup(path);
	}

	for (StringSet::iterator it = _include_dirs.begin(); it != _include_dirs.end(); it++) {
		char path[PATH_MAX];
		strcpy(path, *it);
		const char *p = filename;

		/* Strip leading ".."s from the filename, dropping one path component for each. */
		while (*p == '.') {
			if (*(++p) == '.') {
				char *s = strrchr(path, '/');
				if (s != NULL) *s = '\0';
				p += 2;
			}
		}
		strcat(path, "/");
		strcat(path, p);

		if (access(path, R_OK) == 0) return strdup(path);
	}

	return NULL;
}

/* Forward declarations for the recursive-descent #if expression parser. */
bool ExpressionDefined(Lexer *lexer, StringSet *defines, bool verbose);
bool ExpressionOr(Lexer *lexer, StringSet *defines, bool verbose);

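/**
 * Parse a '!' expression, a parenthesised expression, a literal zero, or a
 * plain identifier; unknown identifiers are assumed to evaluate to true.
 * @param lexer   the lexer to read tokens from.
 * @param defines the set of known defines.
 * @param verbose whether to print the parsed expression to stderr.
 * @return the value of the expression.
 */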
bool ExpressionNot(Lexer *lexer, StringSet *defines, bool verbose)
{
	if (lexer->GetToken() == TOKEN_NOT) {
		if (verbose) fprintf(stderr, "!");
		lexer->Lex();
		bool value = !ExpressionDefined(lexer, defines, verbose);
		if (verbose) fprintf(stderr, "[%d]", value);
		return value;
	}

	if (lexer->GetToken() == TOKEN_OPEN) {
		if (verbose) fprintf(stderr, "(");
		lexer->Lex();
		bool value = ExpressionOr(lexer, defines, verbose);
		if (verbose) fprintf(stderr, ")[%d]", value);
		lexer->Lex();
		return value;
	}

	if (lexer->GetToken() == TOKEN_ZERO) {
		if (verbose) fprintf(stderr, "0");
		lexer->Lex();
		if (verbose) fprintf(stderr, "[0]");
		return false;
	}

	/* Anything else is assumed to evaluate to true. */
	bool first = true;
	while (lexer->GetToken() == TOKEN_UNKNOWN || lexer->GetToken() == TOKEN_IDENTIFIER) {
		if (verbose && first) fprintf(stderr, "<assumed true>");
		first = false;
		lexer->Lex();
	}

	return true;
}

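/**
 * Parse a 'defined(...)' expression; anything else is deferred to the
 * higher-priority expressions.
 * @param lexer   the lexer to read tokens from.
 * @param defines the set of known defines.
 * @param verbose whether to print the parsed expression to stderr.
 * @return the value of the expression.
 */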
bool ExpressionDefined(Lexer *lexer, StringSet *defines, bool verbose)
{
	bool value = ExpressionNot(lexer, defines, verbose);

	if (lexer->GetToken() != TOKEN_DEFINED) return value;
	lexer->Lex();
	if (verbose) fprintf(stderr, "defined");
	bool open = (lexer->GetToken() == TOKEN_OPEN);
	if (open) lexer->Lex();
	if (verbose) fprintf(stderr, open ? "(" : " ");
	if (lexer->GetToken() == TOKEN_IDENTIFIER) {
		if (verbose) fprintf(stderr, "%s", lexer->GetString());
		value = defines->find(lexer->GetString()) != defines->end();
	}
	if (open) {
		if (verbose) fprintf(stderr, ")");
		lexer->Lex();
	}
	lexer->Lex();
	if (verbose) fprintf(stderr, "[%d]", value);
	return value;
}

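/**
 * Parse a sequence of expressions joined by '&&'.
 * @param lexer   the lexer to read tokens from.
 * @param defines the set of known defines.
 * @param verbose whether to print the parsed expression to stderr.
 * @return the value of the expression.
 */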
bool ExpressionAnd(Lexer *lexer, StringSet *defines, bool verbose)
{
	bool value = ExpressionDefined(lexer, defines, verbose);

	for (;;) {
		if (lexer->GetToken() != TOKEN_AND) return value;
		if (verbose) fprintf(stderr, " && ");
		lexer->Lex();
		value = value && ExpressionDefined(lexer, defines, verbose);
	}
}

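/**
 * Parse a sequence of expressions joined by '||'.
 * @param lexer   the lexer to read tokens from.
 * @param defines the set of known defines.
 * @param verbose whether to print the parsed expression to stderr.
 * @return the value of the expression.
 */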
bool ExpressionOr(Lexer *lexer, StringSet *defines, bool verbose)
{
	bool value = ExpressionAnd(lexer, defines, verbose);

	for (;;) {
		if (lexer->GetToken() != TOKEN_OR) return value;
		if (verbose) fprintf(stderr, " || ");
		lexer->Lex();
		value = value || ExpressionAnd(lexer, defines, verbose);
	}
}

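/** The state of conditional compilation: whether lines are currently processed or ignored. */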
enum Ignore {
	NOT_IGNORE,         ///< No ignoring; the lines are being processed.
	IGNORE_UNTIL_ELSE,  ///< Ignore lines until a matching #else, #elif or #endif.
	IGNORE_UNTIL_ENDIF, ///< Ignore lines until the matching #endif.
};

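/**
 * Scan a file for includes, defines and conditional-compilation directives,
 * and record the dependencies in _files and _headers.
 * @param filename the file to scan.
 * @param ext      the extension to give the object file, or NULL for ".o".
 * @param header   whether the file is a header included from another file.
 * @param verbose  whether to print progress information to stderr.
 */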
void ScanFile(const char *filename, const char *ext, bool header, bool verbose)
{
	static StringSet defines;
	static std::stack<Ignore> ignore;

	/* At the start of a source file, seed the defines with the ones from the command line. */
	if (!header) {
		for (StringSet::iterator it = _defines.begin(); it != _defines.end(); it++) {
			defines.insert(strdup(*it));
		}
	}

	File file(filename);
	Lexer lexer(&file);

	/* Read the first token. */
	lexer.Lex();

	while (lexer.GetToken() != TOKEN_END) {
		switch (lexer.GetToken()) {
			case TOKEN_END: break;

			/* Found a preprocessor directive. */
			case TOKEN_SHARP:
				lexer.Lex();
				switch (lexer.GetToken()) {
					case TOKEN_INCLUDE:
						if (verbose) fprintf(stderr, "%s #include ", filename);
						lexer.Lex();
						switch (lexer.GetToken()) {
							case TOKEN_LOCAL:
							case TOKEN_GLOBAL: {
								if (verbose) fprintf(stderr, "%s", lexer.GetString());
								if (!ignore.empty() && ignore.top() != NOT_IGNORE) {
									if (verbose) fprintf(stderr, " (ignored)");
									break;
								}
								const char *h = GeneratePath(file.GetDirname(), lexer.GetString(), lexer.GetToken() == TOKEN_LOCAL);
								if (h != NULL) {
									StringMap::iterator it = _headers.find(h);
									if (it == _headers.end()) {
										it = (_headers.insert(StringMapItem(strdup(h), new StringSet()))).first;
										if (verbose) fprintf(stderr, "\n");
										ScanFile(h, ext, true, verbose);
									}
									StringMap::iterator curfile;
									if (header) {
										curfile = _headers.find(filename);
									} else {
										/* Replace the extension with the provided extension or '.o'. */
										char path[PATH_MAX];
										strcpy(path, filename);
										*(strrchr(path, '.')) = '\0';
										strcat(path, ext != NULL ? ext : ".o");
										curfile = _files.find(path);
										if (curfile == _files.end()) {
											curfile = (_files.insert(StringMapItem(strdup(path), new StringSet()))).first;
										}
									}
									if (it != _headers.end()) {
										for (StringSet::iterator header = it->second->begin(); header != it->second->end(); header++) {
											if (curfile->second->find(*header) == curfile->second->end()) curfile->second->insert(strdup(*header));
										}
									}
									if (curfile->second->find(h) == curfile->second->end()) curfile->second->insert(strdup(h));
									free((void*)h);
								}
							}
							/* FALL THROUGH */
							default: break;
						}
						break;

					case TOKEN_DEFINE:
						if (verbose) fprintf(stderr, "%s #define ", filename);
						lexer.Lex();
						if (lexer.GetToken() == TOKEN_IDENTIFIER) {
							if (verbose) fprintf(stderr, "%s", lexer.GetString());
							if (!ignore.empty() && ignore.top() != NOT_IGNORE) {
								if (verbose) fprintf(stderr, " (ignored)");
								break;
							}
							if (defines.find(lexer.GetString()) == defines.end()) defines.insert(strdup(lexer.GetString()));
							lexer.Lex();
						}
						break;

					case TOKEN_UNDEF:
						if (verbose) fprintf(stderr, "%s #undef ", filename);
						lexer.Lex();
						if (lexer.GetToken() == TOKEN_IDENTIFIER) {
							if (verbose) fprintf(stderr, "%s", lexer.GetString());
							if (!ignore.empty() && ignore.top() != NOT_IGNORE) {
								if (verbose) fprintf(stderr, " (ignored)");
								break;
							}
							StringSet::iterator it = defines.find(lexer.GetString());
							if (it != defines.end()) {
								free((void*)*it);
								defines.erase(it);
							}
							lexer.Lex();
						}
						break;

					case TOKEN_ENDIF:
						if (verbose) fprintf(stderr, "%s #endif", filename);
						lexer.Lex();
						if (!ignore.empty()) ignore.pop();
						if (verbose) fprintf(stderr, " -> %signore", (!ignore.empty() && ignore.top() != NOT_IGNORE) ? "" : "not ");
						break;

					case TOKEN_ELSE: {
						if (verbose) fprintf(stderr, "%s #else", filename);
						lexer.Lex();
						Ignore last = ignore.empty() ? NOT_IGNORE : ignore.top();
						if (!ignore.empty()) ignore.pop();
						if (ignore.empty() || ignore.top() == NOT_IGNORE) {
							ignore.push(last == IGNORE_UNTIL_ELSE ? NOT_IGNORE : IGNORE_UNTIL_ENDIF);
						} else {
							ignore.push(IGNORE_UNTIL_ENDIF);
						}
						if (verbose) fprintf(stderr, " -> %signore", (!ignore.empty() && ignore.top() != NOT_IGNORE) ? "" : "not ");
						break;
					}

					case TOKEN_ELIF: {
						if (verbose) fprintf(stderr, "%s #elif ", filename);
						lexer.Lex();
						Ignore last = ignore.empty() ? NOT_IGNORE : ignore.top();
						if (!ignore.empty()) ignore.pop();
						if (ignore.empty() || ignore.top() == NOT_IGNORE) {
							bool value = ExpressionOr(&lexer, &defines, verbose);
							ignore.push(last == IGNORE_UNTIL_ELSE ? (value ? NOT_IGNORE : IGNORE_UNTIL_ELSE) : IGNORE_UNTIL_ENDIF);
						} else {
							ignore.push(IGNORE_UNTIL_ENDIF);
						}
						if (verbose) fprintf(stderr, " -> %signore", (!ignore.empty() && ignore.top() != NOT_IGNORE) ? "" : "not ");
						break;
					}

					case TOKEN_IF: {
						if (verbose) fprintf(stderr, "%s #if ", filename);
						lexer.Lex();
						if (ignore.empty() || ignore.top() == NOT_IGNORE) {
							bool value = ExpressionOr(&lexer, &defines, verbose);
							ignore.push(value ? NOT_IGNORE : IGNORE_UNTIL_ELSE);
						} else {
							ignore.push(IGNORE_UNTIL_ENDIF);
						}
						if (verbose) fprintf(stderr, " -> %signore", (!ignore.empty() && ignore.top() != NOT_IGNORE) ? "" : "not ");
						break;
					}

					case TOKEN_IFDEF:
						if (verbose) fprintf(stderr, "%s #ifdef ", filename);
						lexer.Lex();
						if (lexer.GetToken() == TOKEN_IDENTIFIER) {
							bool value = defines.find(lexer.GetString()) != defines.end();
							if (verbose) fprintf(stderr, "%s[%d]", lexer.GetString(), value);
							if (ignore.empty() || ignore.top() == NOT_IGNORE) {
								ignore.push(value ? NOT_IGNORE : IGNORE_UNTIL_ELSE);
							} else {
								ignore.push(IGNORE_UNTIL_ENDIF);
							}
						}
						if (verbose) fprintf(stderr, " -> %signore", (!ignore.empty() && ignore.top() != NOT_IGNORE) ? "" : "not ");
						break;

					case TOKEN_IFNDEF:
						if (verbose) fprintf(stderr, "%s #ifndef ", filename);
						lexer.Lex();
						if (lexer.GetToken() == TOKEN_IDENTIFIER) {
							bool value = defines.find(lexer.GetString()) != defines.end();
							if (verbose) fprintf(stderr, "%s[%d]", lexer.GetString(), value);
							if (ignore.empty() || ignore.top() == NOT_IGNORE) {
								ignore.push(!value ? NOT_IGNORE : IGNORE_UNTIL_ELSE);
							} else {
								ignore.push(IGNORE_UNTIL_ENDIF);
							}
						}
						if (verbose) fprintf(stderr, " -> %signore", (!ignore.empty() && ignore.top() != NOT_IGNORE) ? "" : "not ");
						break;

					default:
						if (verbose) fprintf(stderr, "%s #<unknown>", filename);
						lexer.Lex();
						break;
				}
				if (verbose) fprintf(stderr, "\n");
				/* FALL THROUGH */
			default:
				/* Ignore the rest of the line. */
				while (lexer.GetToken() != TOKEN_EOL && lexer.GetToken() != TOKEN_END) lexer.Lex();
				lexer.Lex();
				break;
		}
	}

	/* At the end of a source file, clean up the per-file state. */
	if (!header) {
		for (StringSet::iterator it = defines.begin(); it != defines.end(); it++) {
			free((void*)*it);
		}
		defines.clear();
		while (!ignore.empty()) ignore.pop();
	}
}

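/**
 * Entry point. Scans the source files given on the command line and
 * (re)writes the dependency section of the given Makefile.
 * @param argc the number of arguments.
 * @param argv the command line arguments.
 * @return 0 on success.
 */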
int main(int argc, char *argv[])
{
	bool ignorenext = true; /* Skip argv[0], the program name. */
	char *filename = NULL;
	char *ext = NULL;
	char *delimiter = NULL;
	bool append = false;
	bool verbose = false;

	for (int i = 0; i < argc; i++) {
		if (ignorenext) {
			ignorenext = false;
			continue;
		}
		if (argv[i][0] == '-') {
			/* Append: keep the old dependency section instead of truncating after the delimiter. */
			if (strncmp(argv[i], "-a", 2) == 0) append = true;
			/* Include directory. */
			if (strncmp(argv[i], "-I", 2) == 0) {
				if (argv[i][2] == '\0') {
					i++;
					_include_dirs.insert(strdup(argv[i]));
				} else {
					_include_dirs.insert(strdup(&argv[i][2]));
				}
				continue;
			}
			/* Define. */
			if (strncmp(argv[i], "-D", 2) == 0) {
				char *p = strchr(argv[i], '=');
				if (p != NULL) *p = '\0';
				_defines.insert(strdup(&argv[i][2]));
				continue;
			}
			/* Output (Makefile) filename. */
			if (strncmp(argv[i], "-f", 2) == 0) {
				if (filename != NULL) continue;
				filename = strdup(&argv[i][2]);
				continue;
			}
			/* Object file extension. */
			if (strncmp(argv[i], "-o", 2) == 0) {
				if (ext != NULL) continue;
				ext = strdup(&argv[i][2]);
				continue;
			}
			/* Delimiter string that marks the start of the dependency section. */
			if (strncmp(argv[i], "-s", 2) == 0) {
				if (delimiter != NULL) continue;
				delimiter = strdup(&argv[i][2]);
				continue;
			}
			/* Verbose output. */
			if (strncmp(argv[i], "-v", 2) == 0) verbose = true;
			continue;
		}
		ScanFile(argv[i], ext, false, verbose);
	}

	/* Default to the Makefile. */
	if (filename == NULL) filename = strdup("Makefile");

	/* Default delimiter string. */
	if (delimiter == NULL) delimiter = strdup("# DO NOT DELETE");

	char backup[PATH_MAX];
	strcpy(backup, filename);
	strcat(backup, ".bak");

	char *content = NULL;
	long size = 0;

	/* Read in the current file, so everything from the delimiter onwards can
	 * be overwritten with the newly generated dependencies. */
	FILE *src = fopen(filename, "rb");
	if (src != NULL) {
		fseek(src, 0, SEEK_END);
		size = ftell(src);
		rewind(src);
		content = (char*)malloc(size * sizeof(*content));
		fread(content, 1, size, src);
		fclose(src);
	}

	FILE *dst = fopen(filename, "w");
	bool found_delimiter = false;

	if (size != 0) {
		src = fopen(backup, "wb");
		fwrite(content, 1, size, src);
		fclose(src);

		/* Copy the original content back; without -a, stop after the delimiter line. */
		src = fopen(backup, "rb");
		while (fgets(content, size, src) != NULL) {
			fputs(content, dst);
			if (!strncmp(content, delimiter, strlen(delimiter))) found_delimiter = true;
			if (!append && found_delimiter) break;
		}
		fclose(src);
	}
	if (!found_delimiter) fprintf(dst, "\n%s\n", delimiter);

	/* Write the newly generated dependencies. */
	for (StringMap::iterator it = _files.begin(); it != _files.end(); it++) {
		for (StringSet::iterator h = it->second->begin(); h != it->second->end(); h++) {
			fprintf(dst, "%s: %s\n", it->first, *h);
		}
	}

	/* Clean up our mess. */
	fclose(dst);

	free(delimiter);
	free(filename);
	free(ext);
	free(content);

	for (StringMap::iterator it = _files.begin(); it != _files.end(); it++) {
		for (StringSet::iterator h = it->second->begin(); h != it->second->end(); h++) {
			free((void*)*h);
		}
		it->second->clear();
		delete it->second;
		free((void*)it->first);
	}
	_files.clear();

	for (StringMap::iterator it = _headers.begin(); it != _headers.end(); it++) {
		for (StringSet::iterator h = it->second->begin(); h != it->second->end(); h++) {
			free((void*)*h);
		}
		it->second->clear();
		delete it->second;
		free((void*)it->first);
	}
	_headers.clear();

	for (StringSet::iterator it = _defines.begin(); it != _defines.end(); it++) {
		free((void*)*it);
	}
	_defines.clear();

	for (StringSet::iterator it = _include_dirs.begin(); it != _include_dirs.end(); it++) {
		free((void*)*it);
	}
	_include_dirs.clear();

	return 0;
}