depend.cpp

00001 /* $Id$ */
00002 
00003 /*
00004  * This file is part of OpenTTD.
00005  * OpenTTD is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 2.
00006  * OpenTTD is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
00007  * See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenTTD. If not, see <http://www.gnu.org/licenses/>.
00008  */
00009 
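/**
 * @file depend.cpp A simple dependency generator in the spirit of makedepend:
 * it scans the given C/C++ sources for #include directives, follows a reduced
 * subset of the preprocessor (#define/#undef/#if/#ifdef/#ifndef/#elif/#else/#endif)
 * and writes "object: header" rules to a Makefile-style output file.
 */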
00023 #include <stdio.h>
00024 #include <stdlib.h>
00025 #include <string.h>
00026 #include <ctype.h>
00027 #include <limits.h>
00028 #include <unistd.h>
00029 #include <map>
00030 #include <set>
00031 #include <stack>
00032 
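/**
 * Free the given memory even though it is reached via a const pointer; the
 * containers below store strdup'ed 'const char *' keys that must be freed.
 * @param ptr pointer to free
 */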
00037 static inline void free(const void *ptr)
00038 {
00039   free(const_cast<void *>(ptr));
00040 }
00041 
00042 #ifndef PATH_MAX
00043 /** Maximum length of a path, in case the system headers do not define it. */
00044 # define PATH_MAX 260
00045 #endif
00046 
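/** Ordering of C-style strings, so they can be used as keys in std::set and std::map. */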
00048 struct StringCompare {
00055   bool operator () (const char *a, const char *b) const
00056   {
00057     return strcmp(a, b) < 0;
00058   }
00059 };
00061 typedef std::set<const char*, StringCompare> StringSet;             ///< Set of C-style strings.
00063 typedef std::map<const char*, StringSet*, StringCompare> StringMap; ///< Mapping of a C-style string to a set of C-style strings.
00065 typedef std::pair<const char*, StringSet*> StringMapItem;           ///< A single entry of a StringMap.
00066 
00068 static StringSet _include_dirs; ///< Directories to search for include files (-I).
00070 static StringMap _files;        ///< Dependencies of object files on headers.
00072 static StringMap _headers;      ///< Dependencies of headers on other headers.
00074 static StringSet _defines;      ///< Defines passed on the command line (-D).
00075 
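/**
 * Small wrapper around a file handle: reads one character at a time and
 * remembers the directory part of the file name, which is needed to resolve
 * "local" includes.
 */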
00079 class File {
00080 public:
00086   File(const char *filename)
00087   {
00088     this->fp = fopen(filename, "r");
00089     if (this->fp == NULL) {
00090       fprintf(stdout, "Could not open %s for reading\n", filename);
00091       exit(1);
00092     }
00093     this->dirname = strdup(filename);
00094     char *last = strrchr(this->dirname, '/');
00095     if (last != NULL) {
00096       *last = '\0';
00097     } else {
00098       *this->dirname = '\0';
00099     }
00100   }
00101 
00103   ~File()
00104   {
00105     fclose(this->fp);
00106     free(this->dirname);
00107   }
00108 
00114   char GetChar() const
00115   {
00116     int c = fgetc(this->fp);
00117     return (c == EOF) ? '\0' : c;
00118   }
00119 
00124   const char *GetDirname() const
00125   {
00126     return this->dirname;
00127   }
00128 
00129 private:
00130   FILE *fp;             ///< The currently opened file.
00131   char *dirname;        ///< The directory of the file.
00132 };
00133 
00135 enum Token {
00136   TOKEN_UNKNOWN,    ///< Unknown token.
00137   TOKEN_END,        ///< End of the file.
00138   TOKEN_EOL,        ///< End of the line.
00139   TOKEN_SHARP,      ///< '#' at the beginning of a preprocessor directive.
00140   TOKEN_LOCAL,      ///< "filename" of a local include.
00141   TOKEN_GLOBAL,     ///< <filename> of a global include.
00142   TOKEN_IDENTIFIER, ///< Identifier that is not a keyword.
00143   TOKEN_DEFINE,     ///< (#)define.
00144   TOKEN_IF,         ///< (#)if.
00145   TOKEN_IFDEF,      ///< (#)ifdef.
00146   TOKEN_IFNDEF,     ///< (#)ifndef.
00147   TOKEN_ELIF,       ///< (#)elif.
00148   TOKEN_ELSE,       ///< (#)else.
00149   TOKEN_ENDIF,      ///< (#)endif.
00150   TOKEN_UNDEF,      ///< (#)undef.
00151   TOKEN_OR,         ///< '||'.
00152   TOKEN_AND,        ///< '&&'.
00153   TOKEN_DEFINED,    ///< 'defined'.
00154   TOKEN_OPEN,       ///< '('.
00155   TOKEN_CLOSE,      ///< ')'.
00156   TOKEN_NOT,        ///< '!'.
00157   TOKEN_ZERO,       ///< Numeric constant that evaluates to zero.
00158   TOKEN_INCLUDE,    ///< (#)include.
00159 };
00160 
00162 typedef std::map<const char*, Token, StringCompare> KeywordList; ///< Mapping from a keyword to its token.
00163 
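/**
 * A minimal lexer: it recognises just enough of C/C++ to find preprocessor
 * directives, include names, identifiers and the operators used in #if
 * conditions; comments are skipped and numbers are only classified as
 * "zero" or "non-zero".
 */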
00167 class Lexer {
00168 public:
00173   Lexer(const File *file) : file(file), current_char('\0'), string(NULL), token(TOKEN_UNKNOWN)
00174   {
00175     this->keywords["define"]  = TOKEN_DEFINE;
00176     this->keywords["defined"] = TOKEN_DEFINED;
00177     this->keywords["if"]      = TOKEN_IF;
00178     this->keywords["ifdef"]   = TOKEN_IFDEF;
00179     this->keywords["ifndef"]  = TOKEN_IFNDEF;
00180     this->keywords["include"] = TOKEN_INCLUDE;
00181     this->keywords["elif"]    = TOKEN_ELIF;
00182     this->keywords["else"]    = TOKEN_ELSE;
00183     this->keywords["endif"]   = TOKEN_ENDIF;
00184     this->keywords["undef"]   = TOKEN_UNDEF;
00185 
00186     /* Initialise currently read character. */
00187     this->Next();
00188 
00189     /* Allocate the buffer. */
00190     this->buf_len = 32;
00191     this->buf = (char*)malloc(sizeof(*this->buf) * this->buf_len);
00192   }
00193 
00195   ~Lexer()
00196   {
00197     free(this->buf);
00198   }
00199 
00203   void Next()
00204   {
00205     this->current_char = this->file->GetChar();
00206   }
00207 
00212   Token GetToken() const
00213   {
00214     return this->token;
00215   }
00216 
00221   const char *GetString() const
00222   {
00223     return this->string;
00224   }
00225 
00230   void Lex()
00231   {
00232     for (;;) {
00233       free(this->string);
00234       this->string = NULL;
00235       this->token  = TOKEN_UNKNOWN;
00236 
00237       switch (this->current_char) {
00238         /* '\0' means End-Of-File */
00239         case '\0': this->token = TOKEN_END; return;
00240 
00241         /* Skip some chars, as they don't do anything */
00242         case '\t': this->Next(); break;
00243         case '\r': this->Next(); break;
00244         case ' ':  this->Next(); break;
00245 
00246         case '\\':
00247           this->Next();
00248           if (this->current_char == '\n') this->Next();
00249           break;
00250 
00251         case '\n':
00252           this->token = TOKEN_EOL;
00253           this->Next();
00254           return;
00255 
00256         case '#':
00257           this->token = TOKEN_SHARP;
00258           this->Next();
00259           return;
00260 
00261         case '"':
00262           this->ReadString('"', TOKEN_LOCAL);
00263           this->Next();
00264           return;
00265 
00266         case '<':
00267           this->ReadString('>', TOKEN_GLOBAL);
00268           this->Next();
00269           return;
00270 
00271         case '&':
00272           this->Next();
00273           if (this->current_char == '&') {
00274             this->Next();
00275             this->token = TOKEN_AND;
00276             return;
00277           }
00278           break;
00279 
00280         case '|':
00281           this->Next();
00282           if (this->current_char == '|') {
00283             this->Next();
00284             this->token = TOKEN_OR;
00285             return;
00286           }
00287           break;
00288 
00289         case '(':
00290           this->Next();
00291           this->token = TOKEN_OPEN;
00292           return;
00293 
00294         case ')':
00295           this->Next();
00296           this->token = TOKEN_CLOSE;
00297           return;
00298 
00299         case '!':
00300           this->Next();
00301           if (this->current_char != '=') {
00302             this->token = TOKEN_NOT;
00303             return;
00304           }
00305           break;
00306 
00307         /* Possible begin of comment */
00308         case '/':
00309           this->Next();
00310           switch (this->current_char) {
00311             case '*': {
00312               this->Next();
00313               char previous_char = '\0';
00314               while ((this->current_char != '/' || previous_char != '*') && this->current_char != '\0') {
00315                 previous_char = this->current_char;
00316                 this->Next();
00317               }
00318               this->Next();
00319               break;
00320             }
00321             case '/': while (this->current_char != '\n' && this->current_char != '\0') this->Next(); break;
00322             default: break;
00323           }
00324           break;
00325 
00326         default:
00327           if (isalpha(this->current_char) || this->current_char == '_') {
00328             /* If the name starts with a letter, it is an identifier */
00329             this->ReadIdentifier();
00330             return;
00331           }
00332           if (isdigit(this->current_char)) {
00333             bool zero = this->current_char == '0';
00334             this->Next();
00335             if (this->current_char == 'x' || this->current_char == 'X') this->Next();
00336             while (isdigit(this->current_char) || this->current_char == '.' || (this->current_char >= 'a' && this->current_char <= 'f') || (this->current_char >= 'A' && this->current_char <= 'F')) {
00337               zero &= this->current_char == '0';
00338               this->Next();
00339             }
00340             if (zero) this->token = TOKEN_ZERO;
00341             return;
00342           }
00343           this->Next();
00344           break;
00345       }
00346     }
00347   }
00348 
00349 private:
00355   Token FindKeyword(const char *name) const
00356   {
00357     KeywordList::const_iterator it = this->keywords.find(name);
00358     if (it == this->keywords.end()) return TOKEN_IDENTIFIER;
00359     return (*it).second;
00360   }
00361 
00365   void ReadIdentifier()
00366   {
00367     size_t count = 0;
00368 
00369     /* Read the rest of the identifier */
00370     do {
00371       this->buf[count++] = this->current_char;
00372       this->Next();
00373 
00374       if (count >= buf_len) {
00375         /* Scale the buffer if required */
00376         this->buf_len *= 2;
00377         this->buf = (char *)realloc(this->buf, sizeof(*this->buf) * this->buf_len);
00378       }
00379     } while ((isalpha(this->current_char) || this->current_char == '_' || isdigit(this->current_char)));
00380     this->buf[count] = '\0';
00381 
00382     free(this->string);
00383     this->string = strdup(this->buf);
00384     this->token = FindKeyword(this->string);
00385   }
00386 
00392   void ReadString(char end, Token token)
00393   {
00394     size_t count = 0;
00395     this->Next();
00396     while (this->current_char != end && this->current_char != ')' && this->current_char != '\n' && this->current_char != '\0') {
00397       this->buf[count++] = this->current_char;
00398       this->Next();
00399 
00400       if (count >= this->buf_len) {
00401         /* Scale the buffer if required */
00402         this->buf_len *= 2;
00403         this->buf = (char *)realloc(this->buf, sizeof(*this->buf) * this->buf_len);
00404       }
00405     }
00406     this->buf[count] = '\0';
00407     free(this->string);
00408     this->string = strdup(this->buf);
00409     this->token = token;
00410   }
00411 
00412   const File *file;     ///< The file to read from.
00413   char current_char;    ///< The last read character.
00414   char *string;         ///< Currently processed string (identifier or include name).
00415   Token token;          ///< The last lexed token.
00416   char *buf;            ///< Temporary buffer for reading identifiers and strings.
00417   size_t buf_len;       ///< Length of the temporary buffer.
00418   KeywordList keywords; ///< All keywords this lexer understands.
00419 };
00420 
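/**
 * Try to find an include file on disk.
 * For a "local" include the name is first tried as-is and then relative to
 * the directory of the including file; after that (and for "global" includes)
 * every -I include directory is tried. Leading "../" components remove one
 * directory from the base path.
 * @param dirname  directory of the including file
 * @param filename name as it appears in the #include directive
 * @param local    true for "file.h" includes, false for <file.h> includes
 * @return strdup'ed path of the first readable candidate, or NULL if none is found
 */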
00431 const char *GeneratePath(const char *dirname, const char *filename, bool local)
00432 {
00433   if (local) {
00434     if (access(filename, R_OK) == 0) return strdup(filename);
00435 
00436     char path[PATH_MAX];
00437     strcpy(path, dirname);
00438     const char *p = filename;
00439     /* Remove '..' from the begin of the filename. */
00440     while (*p == '.') {
00441       if (*(++p) == '.') {
00442         char *s = strrchr(path, '/');
00443         if (s != NULL) *s = '\0';
00444         p += 2;
00445       }
00446     }
00447     strcat(path, "/");
00448     strcat(path, p);
00449 
00450     if (access(path, R_OK) == 0) return strdup(path);
00451   }
00452 
00453   for (StringSet::iterator it = _include_dirs.begin(); it != _include_dirs.end(); it++) {
00454     char path[PATH_MAX];
00455     strcpy(path, *it);
00456     const char *p = filename;
00457     /* Remove '..' from the begin of the filename. */
00458     while (*p == '.') {
00459       if (*(++p) == '.') {
00460         char *s = strrchr(path, '/');
00461         if (s != NULL) *s = '\0';
00462         p += 2;
00463       }
00464     }
00465     strcat(path, "/");
00466     strcat(path, p);
00467 
00468     if (access(path, R_OK) == 0) return strdup(path);
00469   }
00470 
00471   return NULL;
00472 }
00473 
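/*
 * The following functions form a tiny recursive-descent evaluator for #if and
 * #elif conditions, with the usual precedence: ExpressionOr handles '||',
 * ExpressionAnd handles '&&', ExpressionDefined handles 'defined(FOO)', and
 * ExpressionNot handles '!', parentheses and a literal zero. Identifiers and
 * anything else the lexer cannot interpret are assumed to be true, so the
 * scanner tends to follow too many includes rather than too few.
 */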
00481 bool ExpressionDefined(Lexer *lexer, StringSet *defines, bool verbose);
00482 
00490 bool ExpressionOr(Lexer *lexer, StringSet *defines, bool verbose);
00491 
00500 bool ExpressionNot(Lexer *lexer, StringSet *defines, bool verbose)
00501 {
00502   if (lexer->GetToken() == TOKEN_NOT) {
00503     if (verbose) fprintf(stderr, "!");
00504     lexer->Lex();
00505     bool value = !ExpressionDefined(lexer, defines, verbose);
00506     if (verbose) fprintf(stderr, "[%d]", value);
00507     return value;
00508   }
00509 
00510   if (lexer->GetToken() == TOKEN_OPEN) {
00511     if (verbose) fprintf(stderr, "(");
00512     lexer->Lex();
00513     bool value = ExpressionOr(lexer, defines, verbose);
00514     if (verbose) fprintf(stderr, ")[%d]", value);
00515     lexer->Lex();
00516     return value;
00517   }
00518 
00519   if (lexer->GetToken() == TOKEN_ZERO) {
00520     if (verbose) fprintf(stderr, "0");
00521     lexer->Lex();
00522     if (verbose) fprintf(stderr, "[0]");
00523     return false;
00524   }
00525 
00526   bool first = true;
00527   while (lexer->GetToken() == TOKEN_UNKNOWN || lexer->GetToken() == TOKEN_IDENTIFIER) {
00528     if (verbose && first) fprintf(stderr, "<assumed true>");
00529     first = false;
00530     lexer->Lex();
00531   }
00532 
00533   return true;
00534 }
00535 
00543 bool ExpressionDefined(Lexer *lexer, StringSet *defines, bool verbose)
00544 {
00545   bool value = ExpressionNot(lexer, defines, verbose);
00546 
00547   if (lexer->GetToken() != TOKEN_DEFINED) return value;
00548   lexer->Lex();
00549   if (verbose) fprintf(stderr, "defined");
00550   bool open = (lexer->GetToken() == TOKEN_OPEN);
00551   if (open) lexer->Lex();
00552   if (verbose) fprintf(stderr, open ? "(" : " ");
00553   if (lexer->GetToken() == TOKEN_IDENTIFIER) {
00554     if (verbose) fprintf(stderr, "%s", lexer->GetString());
00555     value = defines->find(lexer->GetString()) != defines->end();
00556   }
00557   if (open) {
00558     if (verbose) fprintf(stderr, ")");
00559     lexer->Lex();
00560   }
00561   lexer->Lex();
00562   if (verbose) fprintf(stderr, "[%d]", value);
00563   return value;
00564 }
00565 
00573 bool ExpressionAnd(Lexer *lexer, StringSet *defines, bool verbose)
00574 {
00575   bool value = ExpressionDefined(lexer, defines, verbose);
00576 
00577   for (;;) {
00578     if (lexer->GetToken() != TOKEN_AND) return value;
00579     if (verbose) fprintf(stderr, " && ");
00580     lexer->Lex();
00581     value = value && ExpressionDefined(lexer, defines, verbose);
00582   }
00583 }
00584 
00592 bool ExpressionOr(Lexer *lexer, StringSet *defines, bool verbose)
00593 {
00594   bool value = ExpressionAnd(lexer, defines, verbose);
00595 
00596   for (;;) {
00597     if (lexer->GetToken() != TOKEN_OR) return value;
00598     if (verbose) fprintf(stderr, " || ");
00599     lexer->Lex();
00600     value = value || ExpressionAnd(lexer, defines, verbose);
00601   }
00602 }
00603 
00605 enum Ignore {
00606   NOT_IGNORE,         ///< No ignoring; process the directives and code.
00607   IGNORE_UNTIL_ELSE,  ///< Ignore everything until a #else or #endif is reached.
00608   IGNORE_UNTIL_ENDIF, ///< Ignore everything until a #endif is reached.
00609 };
00610 
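/**
 * Scan a file for includes, defines and conditional-compilation directives,
 * and record the resulting dependencies in _files (for sources) or _headers
 * (for headers). Included headers are scanned recursively the first time they
 * are seen.
 * @param filename the file to scan
 * @param ext      extension to use for the object file target; ".o" when NULL
 * @param header   true when the file is an included header rather than a source file
 * @param verbose  print debug information to stderr
 */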
00618 void ScanFile(const char *filename, const char *ext, bool header, bool verbose)
00619 {
00620   static StringSet defines;         ///< Defines currently in effect; static so recursive calls for headers share them.
00621   static std::stack<Ignore> ignore; ///< Ignore state of the nested #if blocks; also shared with recursive calls.
00622   /* Copy in the default defines (parameters of depend) */
00623   if (!header) {
00624     for (StringSet::iterator it = _defines.begin(); it != _defines.end(); it++) {
00625       defines.insert(strdup(*it));
00626     }
00627   }
00628 
00629   File file(filename);
00630   Lexer lexer(&file);
00631 
00632   /* Start the lexing! */
00633   lexer.Lex();
00634 
00635   while (lexer.GetToken() != TOKEN_END) {
00636     switch (lexer.GetToken()) {
00637       /* We reached the end of the file... yay, we're done! */
00638       case TOKEN_END: break;
00639 
00640       /* The line started with a # (minus whitespace) */
00641       case TOKEN_SHARP:
00642         lexer.Lex();
00643         switch (lexer.GetToken()) {
00644           case TOKEN_INCLUDE:
00645             if (verbose) fprintf(stderr, "%s #include ", filename);
00646             lexer.Lex();
00647             switch (lexer.GetToken()) {
00648               case TOKEN_LOCAL:
00649               case TOKEN_GLOBAL: {
00650                 if (verbose) fprintf(stderr, "%s", lexer.GetString());
00651                 if (!ignore.empty() && ignore.top() != NOT_IGNORE) {
00652                   if (verbose) fprintf(stderr, " (ignored)");
00653                   break;
00654                 }
00655                 const char *h = GeneratePath(file.GetDirname(), lexer.GetString(), lexer.GetToken() == TOKEN_LOCAL);
00656                 if (h != NULL) {
00657                   StringMap::iterator it = _headers.find(h);
00658                   if (it == _headers.end()) {
00659                     it = (_headers.insert(StringMapItem(strdup(h), new StringSet()))).first;
00660                     if (verbose) fprintf(stderr, "\n");
00661                     ScanFile(h, ext, true, verbose);
00662                   }
00663                   StringMap::iterator curfile;
00664                   if (header) {
00665                     curfile = _headers.find(filename);
00666                   } else {
00667                     /* Replace the extension with the provided extension of '.o'. */
00668                     char path[PATH_MAX];
00669                     strcpy(path, filename);
00670                     *(strrchr(path, '.')) = '\0';
00671                     strcat(path, ext != NULL ? ext : ".o");
00672                     curfile = _files.find(path);
00673                     if (curfile == _files.end()) {
00674                       curfile = (_files.insert(StringMapItem(strdup(path), new StringSet()))).first;
00675                     }
00676                   }
00677                   if (it != _headers.end()) {
00678                     for (StringSet::iterator header = it->second->begin(); header != it->second->end(); header++) {
00679                       if (curfile->second->find(*header) == curfile->second->end()) curfile->second->insert(strdup(*header));
00680                     }
00681                   }
00682                   if (curfile->second->find(h) == curfile->second->end()) curfile->second->insert(strdup(h));
00683                   free(h);
00684                 }
00685               }
00686               /* FALL THROUGH */
00687               default: break;
00688             }
00689             break;
00690 
00691           case TOKEN_DEFINE:
00692             if (verbose) fprintf(stderr, "%s #define ", filename);
00693             lexer.Lex();
00694             if (lexer.GetToken() == TOKEN_IDENTIFIER) {
00695               if (verbose) fprintf(stderr, "%s", lexer.GetString());
00696               if (!ignore.empty() && ignore.top() != NOT_IGNORE) {
00697                 if (verbose) fprintf(stderr, " (ignored)");
00698                 break;
00699               }
00700               if (defines.find(lexer.GetString()) == defines.end()) defines.insert(strdup(lexer.GetString()));
00701               lexer.Lex();
00702             }
00703             break;
00704 
00705           case TOKEN_UNDEF:
00706             if (verbose) fprintf(stderr, "%s #undef ", filename);
00707             lexer.Lex();
00708             if (lexer.GetToken() == TOKEN_IDENTIFIER) {
00709               if (verbose) fprintf(stderr, "%s", lexer.GetString());
00710               if (!ignore.empty() && ignore.top() != NOT_IGNORE) {
00711                 if (verbose) fprintf(stderr, " (ignored)");
00712                 break;
00713               }
00714               StringSet::iterator it = defines.find(lexer.GetString());
00715               if (it != defines.end()) {
00716                 free(*it);
00717                 defines.erase(it);
00718               }
00719               lexer.Lex();
00720             }
00721             break;
00722 
00723           case TOKEN_ENDIF:
00724             if (verbose) fprintf(stderr, "%s #endif", filename);
00725             lexer.Lex();
00726             if (!ignore.empty()) ignore.pop();
00727             if (verbose) fprintf(stderr, " -> %signore", (!ignore.empty() && ignore.top() != NOT_IGNORE) ? "" : "not ");
00728             break;
00729 
00730           case TOKEN_ELSE: {
00731             if (verbose) fprintf(stderr, "%s #else", filename);
00732             lexer.Lex();
00733             Ignore last = ignore.empty() ? NOT_IGNORE : ignore.top();
00734             if (!ignore.empty()) ignore.pop();
00735             if (ignore.empty() || ignore.top() == NOT_IGNORE) {
00736               ignore.push(last == IGNORE_UNTIL_ELSE ? NOT_IGNORE : IGNORE_UNTIL_ENDIF);
00737             } else {
00738               ignore.push(IGNORE_UNTIL_ENDIF);
00739             }
00740             if (verbose) fprintf(stderr, " -> %signore", (!ignore.empty() && ignore.top() != NOT_IGNORE) ? "" : "not ");
00741             break;
00742           }
00743 
00744           case TOKEN_ELIF: {
00745             if (verbose) fprintf(stderr, "%s #elif ", filename);
00746             lexer.Lex();
00747             Ignore last = ignore.empty() ? NOT_IGNORE : ignore.top();
00748             if (!ignore.empty()) ignore.pop();
00749             if (ignore.empty() || ignore.top() == NOT_IGNORE) {
00750               bool value = ExpressionOr(&lexer, &defines, verbose);
00751               ignore.push(last == IGNORE_UNTIL_ELSE ? (value ? NOT_IGNORE : IGNORE_UNTIL_ELSE) : IGNORE_UNTIL_ENDIF);
00752             } else {
00753               ignore.push(IGNORE_UNTIL_ENDIF);
00754             }
00755             if (verbose) fprintf(stderr, " -> %signore", (!ignore.empty() && ignore.top() != NOT_IGNORE) ? "" : "not ");
00756             break;
00757           }
00758 
00759           case TOKEN_IF: {
00760             if (verbose) fprintf(stderr, "%s #if ", filename);
00761             lexer.Lex();
00762             if (ignore.empty() || ignore.top() == NOT_IGNORE) {
00763               bool value = ExpressionOr(&lexer, &defines, verbose);
00764               ignore.push(value ? NOT_IGNORE : IGNORE_UNTIL_ELSE);
00765             } else {
00766               ignore.push(IGNORE_UNTIL_ENDIF);
00767             }
00768             if (verbose) fprintf(stderr, " -> %signore", (!ignore.empty() && ignore.top() != NOT_IGNORE) ? "" : "not ");
00769             break;
00770           }
00771 
00772           case TOKEN_IFDEF:
00773             if (verbose) fprintf(stderr, "%s #ifdef ", filename);
00774             lexer.Lex();
00775             if (lexer.GetToken() == TOKEN_IDENTIFIER) {
00776               bool value = defines.find(lexer.GetString()) != defines.end();
00777               if (verbose) fprintf(stderr, "%s[%d]", lexer.GetString(), value);
00778               if (ignore.empty() || ignore.top() == NOT_IGNORE) {
00779                 ignore.push(value ? NOT_IGNORE : IGNORE_UNTIL_ELSE);
00780               } else {
00781                 ignore.push(IGNORE_UNTIL_ENDIF);
00782               }
00783             }
00784             if (verbose) fprintf(stderr, " -> %signore", (!ignore.empty() && ignore.top() != NOT_IGNORE) ? "" : "not ");
00785             break;
00786 
00787           case TOKEN_IFNDEF:
00788             if (verbose) fprintf(stderr, "%s #ifndef ", filename);
00789             lexer.Lex();
00790             if (lexer.GetToken() == TOKEN_IDENTIFIER) {
00791               bool value = defines.find(lexer.GetString()) != defines.end();
00792               if (verbose) fprintf(stderr, "%s[%d]", lexer.GetString(), value);
00793               if (ignore.empty() || ignore.top() == NOT_IGNORE) {
00794                 ignore.push(!value ? NOT_IGNORE : IGNORE_UNTIL_ELSE);
00795               } else {
00796                 ignore.push(IGNORE_UNTIL_ENDIF);
00797               }
00798             }
00799             if (verbose) fprintf(stderr, " -> %signore", (!ignore.empty() && ignore.top() != NOT_IGNORE) ? "" : "not ");
00800             break;
00801 
00802           default:
00803             if (verbose) fprintf(stderr, "%s #<unknown>", filename);
00804             lexer.Lex();
00805             break;
00806         }
00807         if (verbose) fprintf(stderr, "\n");
00808         /* FALL THROUGH */
00809       default:
00810         /* Ignore the rest of the garbage on this line */
00811         while (lexer.GetToken() != TOKEN_EOL && lexer.GetToken() != TOKEN_END) lexer.Lex();
00812         lexer.Lex();
00813         break;
00814     }
00815   }
00816 
00817   if (!header) {
00818     for (StringSet::iterator it = defines.begin(); it != defines.end(); it++) {
00819       free(*it);
00820     }
00821     defines.clear();
00822     while (!ignore.empty()) ignore.pop();
00823   }
00824 }
00825 
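/**
 * Entry point. Non-option arguments are scanned as source files; the collected
 * dependencies are written after the delimiter line of the output file as
 * "target.o: header.h" rules, and the previous contents are saved to "<output>.bak".
 *
 *   -a       append to the existing dependency block instead of replacing it
 *   -Idir    add a directory to the include search path (also "-I dir")
 *   -DNAME   predefine NAME (any "=value" part is ignored)
 *   -ffile   write the output to 'file' (default: Makefile)
 *   -oext    extension to use for object file targets (default: .o)
 *   -sstr    delimiter marking the start of the dependency block (default: "# DO NOT DELETE")
 *   -v       print verbose debug output to stderr
 *
 * Files are scanned as soon as they are encountered, so -I, -D and -o must be
 * given before the files they should apply to. Illustrative invocation (the
 * file names are made up):
 *
 *   depend -I../include -DWITH_ZLIB -fMakefile.dep train_cmd.cpp roadveh_cmd.cpp
 */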
00832 int main(int argc, char *argv[])
00833 {
00834   bool ignorenext = true;
00835   char *filename = NULL;
00836   char *ext = NULL;
00837   char *delimiter = NULL;
00838   bool append = false;
00839   bool verbose = false;
00840 
00841   for (int i = 0; i < argc; i++) {
00842     if (ignorenext) {
00843       ignorenext = false;
00844       continue;
00845     }
00846     if (argv[i][0] == '-') {
00847       /* Append */
00848       if (strncmp(argv[i], "-a", 2) == 0) append = true;
00849       /* Include dir */
00850       if (strncmp(argv[i], "-I", 2) == 0) {
00851         if (argv[i][2] == '\0') {
00852           i++;
00853           _include_dirs.insert(strdup(argv[i]));
00854         } else {
00855           _include_dirs.insert(strdup(&argv[i][2]));
00856         }
00857         continue;
00858       }
00859       /* Define */
00860       if (strncmp(argv[i], "-D", 2) == 0) {
00861         char *p = strchr(argv[i], '=');
00862         if (p != NULL) *p = '\0';
00863         _defines.insert(strdup(&argv[i][2]));
00864         continue;
00865       }
00866       /* Output file */
00867       if (strncmp(argv[i], "-f", 2) == 0) {
00868         if (filename != NULL) continue;
00869         filename = strdup(&argv[i][2]);
00870         continue;
00871       }
00872       /* Object file extension */
00873       if (strncmp(argv[i], "-o", 2) == 0) {
00874         if (ext != NULL) continue;
00875         ext = strdup(&argv[i][2]);
00876         continue;
00877       }
00878       /* Starting string delimiter */
00879       if (strncmp(argv[i], "-s", 2) == 0) {
00880         if (delimiter != NULL) continue;
00881         delimiter = strdup(&argv[i][2]);
00882         continue;
00883       }
00884       /* Verbose */
00885       if (strncmp(argv[i], "-v", 2) == 0) verbose = true;
00886       continue;
00887     }
00888     ScanFile(argv[i], ext, false, verbose);
00889   }
00890 
00891   /* Default output file is Makefile */
00892   if (filename == NULL) filename = strdup("Makefile");
00893 
00894   /* Default delimiter string */
00895   if (delimiter == NULL) delimiter = strdup("# DO NOT DELETE");
00896 
00897   char backup[PATH_MAX];
00898   strcpy(backup, filename);
00899   strcat(backup, ".bak");
00900 
00901   char *content = NULL;
00902   long size = 0;
00903 
00904   /* Read in the current file; so we can overwrite everything from the
00905    * end of non-depend data marker down till the end. */
00906   FILE *src = fopen(filename, "rb");
00907   if (src != NULL) {
00908     fseek(src, 0, SEEK_END);
00909     size = ftell(src);
00910     rewind(src);
00911     content = (char*)malloc(size * sizeof(*content));
00912     if (fread(content, 1, size, src) != (size_t)size) {
00913       fprintf(stderr, "Could not read %s\n", filename);
00914       exit(-2);
00915     }
00916     fclose(src);
00917   }
00918 
00919   FILE *dst = fopen(filename, "w");
00920   bool found_delimiter = false;
00921 
00922   if (size != 0) {
00923     src = fopen(backup, "wb");
00924     if (fwrite(content, 1, size, src) != (size_t)size) {
00925       fprintf(stderr, "Could not write %s\n", backup);
00926       exit(-2);
00927     }
00928     fclose(src);
00929 
00930     /* Then append it to the real file. */
00931     src = fopen(backup, "rb");
00932     while (fgets(content, size, src) != NULL) {
00933       fputs(content, dst);
00934       if (!strncmp(content, delimiter, strlen(delimiter))) found_delimiter = true;
00935       if (!append && found_delimiter) break;
00936     }
00937     fclose(src);
00938   }
00939   if (!found_delimiter) fprintf(dst, "\n%s\n", delimiter);
00940 
00941   for (StringMap::iterator it = _files.begin(); it != _files.end(); it++) {
00942     for (StringSet::iterator h = it->second->begin(); h != it->second->end(); h++) {
00943       fprintf(dst, "%s: %s\n", it->first, *h);
00944     }
00945   }
00946 
00947   /* Clean up our mess. */
00948   fclose(dst);
00949 
00950   free(delimiter);
00951   free(filename);
00952   free(ext);
00953   free(content);
00954 
00955   for (StringMap::iterator it = _files.begin(); it != _files.end(); it++) {
00956     for (StringSet::iterator h = it->second->begin(); h != it->second->end(); h++) {
00957       free(*h);
00958     }
00959     it->second->clear();
00960     delete it->second;
00961     free(it->first);
00962   }
00963   _files.clear();
00964 
00965   for (StringMap::iterator it = _headers.begin(); it != _headers.end(); it++) {
00966     for (StringSet::iterator h = it->second->begin(); h != it->second->end(); h++) {
00967       free(*h);
00968     }
00969     it->second->clear();
00970     delete it->second;
00971     free(it->first);
00972   }
00973   _headers.clear();
00974 
00975   for (StringSet::iterator it = _defines.begin(); it != _defines.end(); it++) {
00976     free(*it);
00977   }
00978   _defines.clear();
00979 
00980   for (StringSet::iterator it = _include_dirs.begin(); it != _include_dirs.end(); it++) {
00981     free(*it);
00982   }
00983   _include_dirs.clear();
00984 
00985   return 0;
00986 }