1 // -*- mode: cpp; mode: fold -*-
2 // Description /*{{{*/
3 // $Id: pkgcachegen.cc,v 1.53.2.1 2003/12/24 23:09:17 mdz Exp $
4 /* ######################################################################
5
6 Package Cache Generator - Generator for the cache structure.
7
8 This builds the cache structure from the abstract package list parser.
9
10 ##################################################################### */
11 /*}}}*/
12 // Include Files /*{{{*/
13 #include <config.h>
14
15 #include <apt-pkg/pkgcachegen.h>
16 #include <apt-pkg/error.h>
17 #include <apt-pkg/version.h>
18 #include <apt-pkg/progress.h>
19 #include <apt-pkg/sourcelist.h>
20 #include <apt-pkg/configuration.h>
21 #include <apt-pkg/aptconfiguration.h>
22 #include <apt-pkg/strutl.h>
23 #include <apt-pkg/sptr.h>
24 #include <apt-pkg/pkgsystem.h>
25 #include <apt-pkg/macros.h>
26 #include <apt-pkg/tagfile.h>
27 #include <apt-pkg/metaindex.h>
28 #include <apt-pkg/fileutl.h>
29
30 #include <vector>
31 #include <sys/stat.h>
32 #include <unistd.h>
33 #include <errno.h>
34 #include <stdio.h>
35
36 #include <apti18n.h>
37 /*}}}*/
38 typedef std::vector<pkgIndexFile *>::iterator FileIterator;
39 template <typename Iter> std::vector<Iter*> pkgCacheGenerator::Dynamic<Iter>::toReMap;
40
41 bool IsDuplicateDescription(pkgCache::DescIterator Desc,
42 MD5SumValue const &CurMd5, std::string const &CurLang);
43
44 using std::string;
45
46 // CacheGenerator::pkgCacheGenerator - Constructor /*{{{*/
47 // ---------------------------------------------------------------------
48 /* We set the dirty flag and make sure that it is written to the disk */
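/* The Dirty flag set here is cleared again only in the destructor after a
   successful Map.Sync(), so a cache left behind by an interrupted run can
   be detected as incomplete instead of being trusted. */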
49 pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) :
50 Map(*pMap), Cache(pMap,false), Progress(Prog),
51 FoundFileDeps(0)
52 {
53 CurrentFile = 0;
54 memset(UniqHash,0,sizeof(UniqHash));
55
56 if (_error->PendingError() == true)
57 return;
58
59 if (Map.Size() == 0)
60 {
61 // Set up the map interface..
62 Cache.HeaderP = (pkgCache::Header *)Map.Data();
63 if (Map.RawAllocate(sizeof(pkgCache::Header)) == 0 && _error->PendingError() == true)
64 return;
65
66 Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
67
68 // Starting header
69 *Cache.HeaderP = pkgCache::Header();
70 map_ptrloc const idxVerSysName = WriteStringInMap(_system->VS->Label);
71 Cache.HeaderP->VerSysName = idxVerSysName;
72 map_ptrloc const idxArchitecture = WriteStringInMap(_config->Find("APT::Architecture"));
73 Cache.HeaderP->Architecture = idxArchitecture;
74 if (unlikely(idxVerSysName == 0 || idxArchitecture == 0))
75 return;
76 Cache.ReMap();
77 }
78 else
79 {
80 // Map directly from the existing file
81 Cache.ReMap();
82 Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
83 if (Cache.VS != _system->VS)
84 {
85 _error->Error(_("Cache has an incompatible versioning system"));
86 return;
87 }
88 }
89
90 Cache.HeaderP->Dirty = true;
91 Map.Sync(0,sizeof(pkgCache::Header));
92 }
93 /*}}}*/
94 // CacheGenerator::~pkgCacheGenerator - Destructor /*{{{*/
95 // ---------------------------------------------------------------------
96 /* We sync the data then unset the dirty flag in two steps so as to
97 avoid a problem during a crash */
98 pkgCacheGenerator::~pkgCacheGenerator()
99 {
100 if (_error->PendingError() == true)
101 return;
102 if (Map.Sync() == false)
103 return;
104
105 Cache.HeaderP->Dirty = false;
106 Cache.HeaderP->CacheFileSize = Map.Size();
107 Map.Sync(0,sizeof(pkgCache::Header));
108 }
109 /*}}}*/
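/* A write into the DynamicMMap can grow it, and a growing map may be moved
   to a different base address. Everything that cached raw pointers into the
   old mapping - CurrentFile, the UniqHash buckets and every live iterator
   registered through Dynamic<> - therefore has to be shifted by the delta
   between the old and the new address, which is what ReMap() does below. */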
110 void pkgCacheGenerator::ReMap(void const * const oldMap, void const * const newMap) {/*{{{*/
111 if (oldMap == newMap)
112 return;
113
114 if (_config->FindB("Debug::pkgCacheGen", false))
115 std::clog << "Remapping from " << oldMap << " to " << newMap << std::endl;
116
117 Cache.ReMap(false);
118
119 CurrentFile += (pkgCache::PackageFile*) newMap - (pkgCache::PackageFile*) oldMap;
120
121 for (size_t i = 0; i < _count(UniqHash); ++i)
122 if (UniqHash[i] != 0)
123 UniqHash[i] += (pkgCache::StringItem*) newMap - (pkgCache::StringItem*) oldMap;
124
125 for (std::vector<pkgCache::GrpIterator*>::const_iterator i = Dynamic<pkgCache::GrpIterator>::toReMap.begin();
126 i != Dynamic<pkgCache::GrpIterator>::toReMap.end(); ++i)
127 (*i)->ReMap(oldMap, newMap);
128 for (std::vector<pkgCache::PkgIterator*>::const_iterator i = Dynamic<pkgCache::PkgIterator>::toReMap.begin();
129 i != Dynamic<pkgCache::PkgIterator>::toReMap.end(); ++i)
130 (*i)->ReMap(oldMap, newMap);
131 for (std::vector<pkgCache::VerIterator*>::const_iterator i = Dynamic<pkgCache::VerIterator>::toReMap.begin();
132 i != Dynamic<pkgCache::VerIterator>::toReMap.end(); ++i)
133 (*i)->ReMap(oldMap, newMap);
134 for (std::vector<pkgCache::DepIterator*>::const_iterator i = Dynamic<pkgCache::DepIterator>::toReMap.begin();
135 i != Dynamic<pkgCache::DepIterator>::toReMap.end(); ++i)
136 (*i)->ReMap(oldMap, newMap);
137 for (std::vector<pkgCache::DescIterator*>::const_iterator i = Dynamic<pkgCache::DescIterator>::toReMap.begin();
138 i != Dynamic<pkgCache::DescIterator>::toReMap.end(); ++i)
139 (*i)->ReMap(oldMap, newMap);
140 for (std::vector<pkgCache::PrvIterator*>::const_iterator i = Dynamic<pkgCache::PrvIterator>::toReMap.begin();
141 i != Dynamic<pkgCache::PrvIterator>::toReMap.end(); ++i)
142 (*i)->ReMap(oldMap, newMap);
143 for (std::vector<pkgCache::PkgFileIterator*>::const_iterator i = Dynamic<pkgCache::PkgFileIterator>::toReMap.begin();
144 i != Dynamic<pkgCache::PkgFileIterator>::toReMap.end(); ++i)
145 (*i)->ReMap(oldMap, newMap);
146 } /*}}}*/
147 // CacheGenerator::WriteStringInMap /*{{{*/
148 map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String,
149 const unsigned long &Len) {
150 void const * const oldMap = Map.Data();
151 map_ptrloc const index = Map.WriteString(String, Len);
152 if (index != 0)
153 ReMap(oldMap, Map.Data());
154 return index;
155 }
156 /*}}}*/
157 // CacheGenerator::WriteStringInMap /*{{{*/
158 map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String) {
159 void const * const oldMap = Map.Data();
160 map_ptrloc const index = Map.WriteString(String);
161 if (index != 0)
162 ReMap(oldMap, Map.Data());
163 return index;
164 }
165 /*}}}*/
166 map_ptrloc pkgCacheGenerator::AllocateInMap(const unsigned long &size) {/*{{{*/
167 void const * const oldMap = Map.Data();
168 map_ptrloc const index = Map.Allocate(size);
169 if (index != 0)
170 ReMap(oldMap, Map.Data());
171 return index;
172 }
173 /*}}}*/
174 // CacheGenerator::MergeList - Merge the package list /*{{{*/
175 // ---------------------------------------------------------------------
176 /* This provides the generation of the entries in the cache. Each loop
177 goes through a single package record from the underlying parse engine. */
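/* Rough dispatch per record (sketch of the loop below):
     - neither version nor architecture -> MergeListGroup   (Translation- stanzas)
     - no version, architecture known   -> MergeListPackage (description merge)
     - version present                  -> MergeListVersion (full merge) */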
178 bool pkgCacheGenerator::MergeList(ListParser &List,
179 pkgCache::VerIterator *OutVer)
180 {
181 List.Owner = this;
182
183 unsigned int Counter = 0;
184 while (List.Step() == true)
185 {
186 string const PackageName = List.Package();
187 if (PackageName.empty() == true)
188 return false;
189
190 Counter++;
191 if (Counter % 100 == 0 && Progress != 0)
192 Progress->Progress(List.Offset());
193
194 string Arch = List.Architecture();
195 string const Version = List.Version();
196 if (Version.empty() == true && Arch.empty() == true)
197 {
198 if (MergeListGroup(List, PackageName) == false)
199 return false;
200 }
201
202 if (Arch.empty() == true)
203 Arch = _config->Find("APT::Architecture");
204
205 // Get a pointer to the package structure
206 pkgCache::PkgIterator Pkg;
207 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
208 if (NewPackage(Pkg, PackageName, Arch) == false)
209 // TRANSLATOR: The first placeholder is a package name,
210 // the other two should be copied verbatim as they include debug info
211 return _error->Error(_("Error occurred while processing %s (%s%d)"),
212 PackageName.c_str(), "NewPackage", 1);
213
214
215 if (Version.empty() == true)
216 {
217 if (MergeListPackage(List, Pkg) == false)
218 return false;
219 }
220 else
221 {
222 if (MergeListVersion(List, Pkg, Version, OutVer) == false)
223 return false;
224 }
225
226 if (OutVer != 0)
227 {
228 FoundFileDeps |= List.HasFileDeps();
229 return true;
230 }
231 }
232
233 if (Cache.HeaderP->PackageCount >= (1ULL<<sizeof(Cache.PkgP->ID)*8)-1)
234 return _error->Error(_("Wow, you exceeded the number of package "
235 "names this APT is capable of."));
236 if (Cache.HeaderP->VersionCount >= (1ULL<<(sizeof(Cache.VerP->ID)*8))-1)
237 return _error->Error(_("Wow, you exceeded the number of versions "
238 "this APT is capable of."));
239 if (Cache.HeaderP->DescriptionCount >= (1ULL<<(sizeof(Cache.DescP->ID)*8))-1)
240 return _error->Error(_("Wow, you exceeded the number of descriptions "
241 "this APT is capable of."));
242 if (Cache.HeaderP->DependsCount >= (1ULL<<(sizeof(Cache.DepP->ID)*8))-1ULL)
243 return _error->Error(_("Wow, you exceeded the number of dependencies "
244 "this APT is capable of."));
245
246 FoundFileDeps |= List.HasFileDeps();
247 return true;
248 }
249 // CacheGenerator::MergeListGroup /*{{{*/
250 bool pkgCacheGenerator::MergeListGroup(ListParser &List, std::string const &GrpName)
251 {
252 pkgCache::GrpIterator Grp = Cache.FindGrp(GrpName);
253 // a group has no data on its own, only packages have it, but stanzas
254 // like this come from Translation- files to add descriptions,
255 // and without a version we don't need a description for it…
256 if (Grp.end() == true)
257 return true;
258 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
259
260 pkgCache::PkgIterator Pkg;
261 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
262 for (Pkg = Grp.PackageList(); Pkg.end() == false; Pkg = Grp.NextPkg(Pkg))
263 if (MergeListPackage(List, Pkg) == false)
264 return false;
265
266 return true;
267 }
268 /*}}}*/
269 // CacheGenerator::MergeListPackage /*{{{*/
270 bool pkgCacheGenerator::MergeListPackage(ListParser &List, pkgCache::PkgIterator &Pkg)
271 {
272 // we first process the package, then the descriptions
273 // (for deb this package processing is in fact a no-op)
274 pkgCache::VerIterator Ver(Cache);
275 Dynamic<pkgCache::VerIterator> DynVer(Ver);
276 if (List.UsePackage(Pkg, Ver) == false)
277 return _error->Error(_("Error occurred while processing %s (%s%d)"),
278 Pkg.Name(), "UsePackage", 1);
279
280 // Find the right version to write the description
281 MD5SumValue CurMd5 = List.Description_md5();
282 std::string CurLang = List.DescriptionLanguage();
283
284 for (Ver = Pkg.VersionList(); Ver.end() == false; ++Ver)
285 {
286 pkgCache::DescIterator Desc = Ver.DescriptionList();
287
288 // a version can only have one md5 describing it
289 if (MD5SumValue(Desc.md5()) != CurMd5)
290 continue;
291
292 // don't add a new description if we have one for the given
293 // md5 && language
294 if (IsDuplicateDescription(Desc, CurMd5, CurLang) == true)
295 continue;
296
297 Dynamic<pkgCache::DescIterator> DynDesc(Desc);
298 // we add at the end, so that the start is constant as we need
299 // that to be able to efficiently share these lists
300 map_ptrloc *LastDesc = &Ver->DescriptionList;
301 for (;Desc.end() == false && Desc->NextDesc != 0; ++Desc);
302 if (Desc.end() == false)
303 LastDesc = &Desc->NextDesc;
304
305 void const * const oldMap = Map.Data();
306 map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, *LastDesc);
307 if (oldMap != Map.Data())
308 LastDesc += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
309 *LastDesc = descindex;
310 Desc->ParentPkg = Pkg.Index();
311
312 if ((*LastDesc == 0 && _error->PendingError()) || NewFileDesc(Desc,List) == false)
313 return _error->Error(_("Error occurred while processing %s (%s%d)"),
314 Pkg.Name(), "NewFileDesc", 1);
315
316 // we can stop here as all "same" versions will share the description
317 break;
318 }
319
320 return true;
321 }
322 /*}}}*/
323 // CacheGenerator::MergeListVersion /*{{{*/
324 bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator &Pkg,
325 std::string const &Version, pkgCache::VerIterator* &OutVer)
326 {
327 pkgCache::VerIterator Ver = Pkg.VersionList();
328 Dynamic<pkgCache::VerIterator> DynVer(Ver);
329 map_ptrloc *LastVer = &Pkg->VersionList;
330 void const * oldMap = Map.Data();
331
332 unsigned long const Hash = List.VersionHash();
333 if (Ver.end() == false)
334 {
335 /* We know the list is sorted so we use that fact in the search.
336 Insertion of new versions is done with correct sorting */
337 int Res = 1;
338 for (; Ver.end() == false; LastVer = &Ver->NextVer, Ver++)
339 {
340 Res = Cache.VS->CmpVersion(Version,Ver.VerStr());
341 // Version is higher than the current version - insert here
342 if (Res > 0)
343 break;
344 // Version strings are equal - is the hash also equal?
345 if (Res == 0 && Ver->Hash == Hash)
346 break;
347 // proceed with the next version until we either find the right one
348 // or a lower one (which marks the insertion point)
349 }
350
351 /* We already have a version for this item, record that we saw it */
352 if (Res == 0 && Ver.end() == false && Ver->Hash == Hash)
353 {
354 if (List.UsePackage(Pkg,Ver) == false)
355 return _error->Error(_("Error occurred while processing %s (%s%d)"),
356 Pkg.Name(), "UsePackage", 2);
357
358 if (NewFileVer(Ver,List) == false)
359 return _error->Error(_("Error occurred while processing %s (%s%d)"),
360 Pkg.Name(), "NewFileVer", 1);
361
362 // Read only a single record and return
363 if (OutVer != 0)
364 {
365 *OutVer = Ver;
366 return true;
367 }
368
369 return true;
370 }
371 }
372
373 // Add a new version
374 map_ptrloc const verindex = NewVersion(Ver,Version,*LastVer);
375 if (verindex == 0 && _error->PendingError())
376 return _error->Error(_("Error occurred while processing %s (%s%d)"),
377 Pkg.Name(), "NewVersion", 1);
378
379 if (oldMap != Map.Data())
380 LastVer += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
381 *LastVer = verindex;
382 Ver->ParentPkg = Pkg.Index();
383 Ver->Hash = Hash;
384
385 if (unlikely(List.NewVersion(Ver) == false))
386 return _error->Error(_("Error occurred while processing %s (%s%d)"),
387 Pkg.Name(), "NewVersion", 2);
388
389 if (unlikely(List.UsePackage(Pkg,Ver) == false))
390 return _error->Error(_("Error occurred while processing %s (%s%d)"),
391 Pkg.Name(), "UsePackage", 3);
392
393 if (unlikely(NewFileVer(Ver,List) == false))
394 return _error->Error(_("Error occurred while processing %s (%s%d)"),
395 Pkg.Name(), "NewFileVer", 2);
396
397 pkgCache::GrpIterator Grp = Pkg.Group();
398 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
399
400 /* If it is the first version of this package we need to add implicit
401 Multi-Arch dependencies to all other package versions in the group now -
402 otherwise we just add them for this new version */
403 if (Pkg.VersionList()->NextVer == 0)
404 {
405 pkgCache::PkgIterator P = Grp.PackageList();
406 Dynamic<pkgCache::PkgIterator> DynP(P);
407 for (; P.end() != true; P = Grp.NextPkg(P))
408 {
409 if (P->ID == Pkg->ID)
410 continue;
411 pkgCache::VerIterator V = P.VersionList();
412 Dynamic<pkgCache::VerIterator> DynV(V);
413 for (; V.end() != true; ++V)
414 if (unlikely(AddImplicitDepends(V, Pkg) == false))
415 return _error->Error(_("Error occurred while processing %s (%s%d)"),
416 Pkg.Name(), "AddImplicitDepends", 1);
417 }
418 }
419 if (unlikely(AddImplicitDepends(Grp, Pkg, Ver) == false))
420 return _error->Error(_("Error occurred while processing %s (%s%d)"),
421 Pkg.Name(), "AddImplicitDepends", 2);
422
423 // Read only a single record and return
424 if (OutVer != 0)
425 {
426 *OutVer = Ver;
427 return true;
428 }
429
430 /* Record the Description (it is not translated) */
431 MD5SumValue CurMd5 = List.Description_md5();
432 if (CurMd5.Value().empty() == true)
433 return true;
434 std::string CurLang = List.DescriptionLanguage();
435
436 /* Before we add a new description we first search in the group for
437 a version with a description of the same MD5 - if we find one we reuse
438 its description list instead of creating our own for this version */
439 for (pkgCache::PkgIterator P = Grp.PackageList();
440 P.end() == false; P = Grp.NextPkg(P))
441 {
442 for (pkgCache::VerIterator V = P.VersionList();
443 V.end() == false; ++V)
444 {
445 if (IsDuplicateDescription(V.DescriptionList(), CurMd5, "") == false)
446 continue;
447 Ver->DescriptionList = V->DescriptionList;
448 return true;
449 }
450 }
451
452 // We haven't found reusable descriptions, so add the first description
453 pkgCache::DescIterator Desc = Ver.DescriptionList();
454 Dynamic<pkgCache::DescIterator> DynDesc(Desc);
455 map_ptrloc *LastDesc = &Ver->DescriptionList;
456
457 oldMap = Map.Data();
458 map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, *LastDesc);
459 if (oldMap != Map.Data())
460 LastDesc += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
461 *LastDesc = descindex;
462 Desc->ParentPkg = Pkg.Index();
463
464 if ((*LastDesc == 0 && _error->PendingError()) || NewFileDesc(Desc,List) == false)
465 return _error->Error(_("Error occurred while processing %s (%s%d)"),
466 Pkg.Name(), "NewFileDesc", 2);
467
468 return true;
469 }
470 /*}}}*/
471 /*}}}*/
472 // CacheGenerator::MergeFileProvides - Merge file provides /*{{{*/
473 // ---------------------------------------------------------------------
474 /* If we found any file depends while parsing the main list we need to
475 resolve them. Since it is undesirable to load the entire list of files
476 into the cache as virtual packages we do a two-stage effort. MergeList
477 identifies the file depends and this creates Provides for them by
478 re-parsing all the indexes. */
479 bool pkgCacheGenerator::MergeFileProvides(ListParser &List)
480 {
481 List.Owner = this;
482
483 unsigned int Counter = 0;
484 while (List.Step() == true)
485 {
486 string PackageName = List.Package();
487 if (PackageName.empty() == true)
488 return false;
489 string Version = List.Version();
490 if (Version.empty() == true)
491 continue;
492
493 pkgCache::PkgIterator Pkg = Cache.FindPkg(PackageName);
494 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
495 if (Pkg.end() == true)
496 return _error->Error(_("Error occurred while processing %s (%s%d)"),
497 PackageName.c_str(), "FindPkg", 1);
498 Counter++;
499 if (Counter % 100 == 0 && Progress != 0)
500 Progress->Progress(List.Offset());
501
502 unsigned long Hash = List.VersionHash();
503 pkgCache::VerIterator Ver = Pkg.VersionList();
504 Dynamic<pkgCache::VerIterator> DynVer(Ver);
505 for (; Ver.end() == false; ++Ver)
506 {
507 if (Ver->Hash == Hash && Version == Ver.VerStr())
508 {
509 if (List.CollectFileProvides(Cache,Ver) == false)
510 return _error->Error(_("Error occurred while processing %s (%s%d)"),
511 PackageName.c_str(), "CollectFileProvides", 1);
512 break;
513 }
514 }
515
516 if (Ver.end() == true)
517 _error->Warning(_("Package %s %s was not found while processing file dependencies"),PackageName.c_str(),Version.c_str());
518 }
519
520 return true;
521 }
522 /*}}}*/
523 // CacheGenerator::NewGroup - Add a new group /*{{{*/
524 // ---------------------------------------------------------------------
525 /* This creates a new group structure and adds it to the hash table */
526 bool pkgCacheGenerator::NewGroup(pkgCache::GrpIterator &Grp, const string &Name)
527 {
528 Grp = Cache.FindGrp(Name);
529 if (Grp.end() == false)
530 return true;
531
532 // Get a structure
533 map_ptrloc const Group = AllocateInMap(sizeof(pkgCache::Group));
534 if (unlikely(Group == 0))
535 return false;
536
537 Grp = pkgCache::GrpIterator(Cache, Cache.GrpP + Group);
538 map_ptrloc const idxName = WriteStringInMap(Name);
539 if (unlikely(idxName == 0))
540 return false;
541 Grp->Name = idxName;
542
543 // Insert it into the hash table
544 unsigned long const Hash = Cache.Hash(Name);
545 Grp->Next = Cache.HeaderP->GrpHashTable[Hash];
546 Cache.HeaderP->GrpHashTable[Hash] = Group;
547
548 Grp->ID = Cache.HeaderP->GroupCount++;
549 return true;
550 }
551 /*}}}*/
552 // CacheGenerator::NewPackage - Add a new package /*{{{*/
553 // ---------------------------------------------------------------------
554 /* This creates a new package structure and adds it to the hash table */
555 bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name,
556 const string &Arch) {
557 pkgCache::GrpIterator Grp;
558 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
559 if (unlikely(NewGroup(Grp, Name) == false))
560 return false;
561
562 Pkg = Grp.FindPkg(Arch);
563 if (Pkg.end() == false)
564 return true;
565
566 // Get a structure
567 map_ptrloc const Package = AllocateInMap(sizeof(pkgCache::Package));
568 if (unlikely(Package == 0))
569 return false;
570 Pkg = pkgCache::PkgIterator(Cache,Cache.PkgP + Package);
571
572 // Insert the package into our package list
573 if (Grp->FirstPackage == 0) // the group is new
574 {
575 // Insert it into the hash table
576 unsigned long const Hash = Cache.Hash(Name);
577 Pkg->NextPackage = Cache.HeaderP->PkgHashTable[Hash];
578 Cache.HeaderP->PkgHashTable[Hash] = Package;
579 Grp->FirstPackage = Package;
580 }
581 else // Group the Packages together
582 {
583 // this package is the new last package
584 pkgCache::PkgIterator LastPkg(Cache, Cache.PkgP + Grp->LastPackage);
585 Pkg->NextPackage = LastPkg->NextPackage;
586 LastPkg->NextPackage = Package;
587 }
588 Grp->LastPackage = Package;
589
590 // Set the name, arch and the ID
591 Pkg->Name = Grp->Name;
592 Pkg->Group = Grp.Index();
593 // 'all' is mapped to the native architecture
594 map_ptrloc const idxArch = (Arch == "all") ? Cache.HeaderP->Architecture : WriteUniqString(Arch.c_str());
595 if (unlikely(idxArch == 0))
596 return false;
597 Pkg->Arch = idxArch;
598 Pkg->ID = Cache.HeaderP->PackageCount++;
599
600 return true;
601 }
602 /*}}}*/
603 // CacheGenerator::AddImplicitDepends /*{{{*/
604 bool pkgCacheGenerator::AddImplicitDepends(pkgCache::GrpIterator &G,
605 pkgCache::PkgIterator &P,
606 pkgCache::VerIterator &V)
607 {
608 // copy P.Arch() into a string here as a cache remap
609 // in NewDepends() later may alter the pointer location
610 string Arch = P.Arch() == NULL ? "" : P.Arch();
611 map_ptrloc *OldDepLast = NULL;
612 /* MultiArch handling introduces a lot of implicit Dependencies:
613 - MultiArch: same → Co-Installable if they have the same version
614 - All others conflict with all other group members */
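/* Illustration with hypothetical packages libfoo:amd64 and libfoo:i386:
   if built "Multi-Arch: same", each version gets, against the other architecture,
      Replaces: libfoo (<< ${binary:Version})
      Breaks:   libfoo (!= ${binary:Version})
   while without "Multi-Arch: same" a plain
      Conflicts: libfoo
   is generated instead, exactly as the loop below spells out. */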
615 bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
616 pkgCache::PkgIterator D = G.PackageList();
617 Dynamic<pkgCache::PkgIterator> DynD(D);
618 for (; D.end() != true; D = G.NextPkg(D))
619 {
620 if (Arch == D.Arch() || D->VersionList == 0)
621 continue;
622 /* We allow only one installed arch at a time
623 per group, therefore each group member conflicts
624 with all other group members */
625 if (coInstall == true)
626 {
627 // Replaces: ${self}:other ( << ${binary:Version})
628 NewDepends(D, V, V.VerStr(),
629 pkgCache::Dep::Less, pkgCache::Dep::Replaces,
630 OldDepLast);
631 // Breaks: ${self}:other (!= ${binary:Version})
632 NewDepends(D, V, V.VerStr(),
633 pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
634 OldDepLast);
635 } else {
636 // Conflicts: ${self}:other
637 NewDepends(D, V, "",
638 pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
639 OldDepLast);
640 }
641 }
642 return true;
643 }
644 bool pkgCacheGenerator::AddImplicitDepends(pkgCache::VerIterator &V,
645 pkgCache::PkgIterator &D)
646 {
647 /* MultiArch handling introduces a lot of implicit Dependencies:
648 - MultiArch: same → Co-Installable if they have the same version
649 - All others conflict with all other group members */
650 map_ptrloc *OldDepLast = NULL;
651 bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
652 if (coInstall == true)
653 {
654 // Replaces: ${self}:other ( << ${binary:Version})
655 NewDepends(D, V, V.VerStr(),
656 pkgCache::Dep::Less, pkgCache::Dep::Replaces,
657 OldDepLast);
658 // Breaks: ${self}:other (!= ${binary:Version})
659 NewDepends(D, V, V.VerStr(),
660 pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
661 OldDepLast);
662 } else {
663 // Conflicts: ${self}:other
664 NewDepends(D, V, "",
665 pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
666 OldDepLast);
667 }
668 return true;
669 }
670
671 /*}}}*/
672 // CacheGenerator::NewFileVer - Create a new File<->Version association /*{{{*/
673 // ---------------------------------------------------------------------
674 /* */
675 bool pkgCacheGenerator::NewFileVer(pkgCache::VerIterator &Ver,
676 ListParser &List)
677 {
678 if (CurrentFile == 0)
679 return true;
680
681 // Get a structure
682 map_ptrloc const VerFile = AllocateInMap(sizeof(pkgCache::VerFile));
683 if (VerFile == 0)
684 return false;
685
686 pkgCache::VerFileIterator VF(Cache,Cache.VerFileP + VerFile);
687 VF->File = CurrentFile - Cache.PkgFileP;
688
689 // Link it to the end of the list
690 map_ptrloc *Last = &Ver->FileList;
691 for (pkgCache::VerFileIterator V = Ver.FileList(); V.end() == false; ++V)
692 Last = &V->NextFile;
693 VF->NextFile = *Last;
694 *Last = VF.Index();
695
696 VF->Offset = List.Offset();
697 VF->Size = List.Size();
698 if (Cache.HeaderP->MaxVerFileSize < VF->Size)
699 Cache.HeaderP->MaxVerFileSize = VF->Size;
700 Cache.HeaderP->VerFileCount++;
701
702 return true;
703 }
704 /*}}}*/
705 // CacheGenerator::NewVersion - Create a new Version /*{{{*/
706 // ---------------------------------------------------------------------
707 /* This puts a version structure in the linked list */
708 unsigned long pkgCacheGenerator::NewVersion(pkgCache::VerIterator &Ver,
709 const string &VerStr,
710 unsigned long Next)
711 {
712 // Get a structure
713 map_ptrloc const Version = AllocateInMap(sizeof(pkgCache::Version));
714 if (Version == 0)
715 return 0;
716
717 // Fill it in
718 Ver = pkgCache::VerIterator(Cache,Cache.VerP + Version);
719 Ver->NextVer = Next;
720 Ver->ID = Cache.HeaderP->VersionCount++;
721 map_ptrloc const idxVerStr = WriteStringInMap(VerStr);
722 if (unlikely(idxVerStr == 0))
723 return 0;
724 Ver->VerStr = idxVerStr;
725
726 return Version;
727 }
728 /*}}}*/
729 // CacheGenerator::NewFileDesc - Create a new File<->Desc association /*{{{*/
730 // ---------------------------------------------------------------------
731 /* */
732 bool pkgCacheGenerator::NewFileDesc(pkgCache::DescIterator &Desc,
733 ListParser &List)
734 {
735 if (CurrentFile == 0)
736 return true;
737
738 // Get a structure
739 map_ptrloc const DescFile = AllocateInMap(sizeof(pkgCache::DescFile));
740 if (DescFile == 0)
741 return false;
742
743 pkgCache::DescFileIterator DF(Cache,Cache.DescFileP + DescFile);
744 DF->File = CurrentFile - Cache.PkgFileP;
745
746 // Link it to the end of the list
747 map_ptrloc *Last = &Desc->FileList;
748 for (pkgCache::DescFileIterator D = Desc.FileList(); D.end() == false; ++D)
749 Last = &D->NextFile;
750
751 DF->NextFile = *Last;
752 *Last = DF.Index();
753
754 DF->Offset = List.Offset();
755 DF->Size = List.Size();
756 if (Cache.HeaderP->MaxDescFileSize < DF->Size)
757 Cache.HeaderP->MaxDescFileSize = DF->Size;
758 Cache.HeaderP->DescFileCount++;
759
760 return true;
761 }
762 /*}}}*/
763 // CacheGenerator::NewDescription - Create a new Description /*{{{*/
764 // ---------------------------------------------------------------------
765 /* This puts a description structure in the linked list */
766 map_ptrloc pkgCacheGenerator::NewDescription(pkgCache::DescIterator &Desc,
767 const string &Lang,
768 const MD5SumValue &md5sum,
769 map_ptrloc Next)
770 {
771 // Get a structure
772 map_ptrloc const Description = AllocateInMap(sizeof(pkgCache::Description));
773 if (Description == 0)
774 return 0;
775
776 // Fill it in
777 Desc = pkgCache::DescIterator(Cache,Cache.DescP + Description);
778 Desc->NextDesc = Next;
779 Desc->ID = Cache.HeaderP->DescriptionCount++;
780 map_ptrloc const idxlanguage_code = WriteStringInMap(Lang);
781 map_ptrloc const idxmd5sum = WriteStringInMap(md5sum.Value());
782 if (unlikely(idxlanguage_code == 0 || idxmd5sum == 0))
783 return 0;
784 Desc->language_code = idxlanguage_code;
785 Desc->md5sum = idxmd5sum;
786
787 return Description;
788 }
789 /*}}}*/
790 // CacheGenerator::NewDepends - Create a dependency element /*{{{*/
791 // ---------------------------------------------------------------------
792 /* This creates a dependency element in the tree. It is linked to the
793 version and to the package that it is pointing to. */
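/* OldDepLast caches the tail of the version's dependency list between calls:
   pass NULL for the first dependency of a version and the list end is looked
   up once; afterwards each new dependency is appended in constant time. */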
794 bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
795 pkgCache::VerIterator &Ver,
796 string const &Version,
797 unsigned int const &Op,
798 unsigned int const &Type,
799 map_ptrloc* &OldDepLast)
800 {
801 void const * const oldMap = Map.Data();
802 // Get a structure
803 map_ptrloc const Dependency = AllocateInMap(sizeof(pkgCache::Dependency));
804 if (unlikely(Dependency == 0))
805 return false;
806
807 // Fill it in
808 pkgCache::DepIterator Dep(Cache,Cache.DepP + Dependency);
809 Dynamic<pkgCache::DepIterator> DynDep(Dep);
810 Dep->ParentVer = Ver.Index();
811 Dep->Type = Type;
812 Dep->CompareOp = Op;
813 Dep->ID = Cache.HeaderP->DependsCount++;
814
815 // Probe the reverse dependency list for a version string that matches
816 if (Version.empty() == false)
817 {
818 /* for (pkgCache::DepIterator I = Pkg.RevDependsList(); I.end() == false; I++)
819 if (I->Version != 0 && I.TargetVer() == Version)
820 Dep->Version = I->Version;*/
821 if (Dep->Version == 0) {
822 map_ptrloc const index = WriteStringInMap(Version);
823 if (unlikely(index == 0))
824 return false;
825 Dep->Version = index;
826 }
827 }
828
829 // Link it to the package
830 Dep->Package = Pkg.Index();
831 Dep->NextRevDepends = Pkg->RevDepends;
832 Pkg->RevDepends = Dep.Index();
833
834 // Do we know where to link the Dependency to?
835 if (OldDepLast == NULL)
836 {
837 OldDepLast = &Ver->DependsList;
838 for (pkgCache::DepIterator D = Ver.DependsList(); D.end() == false; ++D)
839 OldDepLast = &D->NextDepends;
840 } else if (oldMap != Map.Data())
841 OldDepLast += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
842
843 Dep->NextDepends = *OldDepLast;
844 *OldDepLast = Dep.Index();
845 OldDepLast = &Dep->NextDepends;
846
847 return true;
848 }
849 /*}}}*/
850 // ListParser::NewDepends - Create the environment for a new dependency /*{{{*/
851 // ---------------------------------------------------------------------
852 /* This creates a Group and the Package to link this dependency to if
853 needed and also handles the caching of the old endpoint */
854 bool pkgCacheGenerator::ListParser::NewDepends(pkgCache::VerIterator &Ver,
855 const string &PackageName,
856 const string &Arch,
857 const string &Version,
858 unsigned int Op,
859 unsigned int Type)
860 {
861 pkgCache::GrpIterator Grp;
862 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
863 if (unlikely(Owner->NewGroup(Grp, PackageName) == false))
864 return false;
865
866 // Locate the target package
867 pkgCache::PkgIterator Pkg = Grp.FindPkg(Arch);
868 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
869 if (Pkg.end() == true) {
870 if (unlikely(Owner->NewPackage(Pkg, PackageName, Arch) == false))
871 return false;
872 }
873
874 // Is it a file dependency?
875 if (unlikely(PackageName[0] == '/'))
876 FoundFileDeps = true;
877
878 /* Caching the old end point speeds up generation substantially */
879 if (OldDepVer != Ver) {
880 OldDepLast = NULL;
881 OldDepVer = Ver;
882 }
883
884 return Owner->NewDepends(Pkg, Ver, Version, Op, Type, OldDepLast);
885 }
886 /*}}}*/
887 // ListParser::NewProvides - Create a Provides element /*{{{*/
888 // ---------------------------------------------------------------------
889 /* */
890 bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator &Ver,
891 const string &PkgName,
892 const string &PkgArch,
893 const string &Version)
894 {
895 pkgCache &Cache = Owner->Cache;
896
897 // We do not add self-referencing provides
898 if (Ver.ParentPkg().Name() == PkgName && (PkgArch == Ver.ParentPkg().Arch() ||
899 (PkgArch == "all" && strcmp((Cache.StrP + Cache.HeaderP->Architecture), Ver.ParentPkg().Arch()) == 0)))
900 return true;
901
902 // Get a structure
903 map_ptrloc const Provides = Owner->AllocateInMap(sizeof(pkgCache::Provides));
904 if (unlikely(Provides == 0))
905 return false;
906 Cache.HeaderP->ProvidesCount++;
907
908 // Fill it in
909 pkgCache::PrvIterator Prv(Cache,Cache.ProvideP + Provides,Cache.PkgP);
910 Dynamic<pkgCache::PrvIterator> DynPrv(Prv);
911 Prv->Version = Ver.Index();
912 Prv->NextPkgProv = Ver->ProvidesList;
913 Ver->ProvidesList = Prv.Index();
914 if (Version.empty() == false && unlikely((Prv->ProvideVersion = WriteString(Version)) == 0))
915 return false;
916
917 // Locate the target package
918 pkgCache::PkgIterator Pkg;
919 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
920 if (unlikely(Owner->NewPackage(Pkg,PkgName, PkgArch) == false))
921 return false;
922
923 // Link it to the package
924 Prv->ParentPkg = Pkg.Index();
925 Prv->NextProvides = Pkg->ProvidesList;
926 Pkg->ProvidesList = Prv.Index();
927
928 return true;
929 }
930 /*}}}*/
931 // CacheGenerator::SelectFile - Select the current file being parsed /*{{{*/
932 // ---------------------------------------------------------------------
933 /* This is used to select which file is to be associated with all newly
934 added versions. The caller is responsible for setting the IMS fields. */
935 bool pkgCacheGenerator::SelectFile(const string &File,const string &Site,
936 const pkgIndexFile &Index,
937 unsigned long Flags)
938 {
939 // Get some space for the structure
940 map_ptrloc const idxFile = AllocateInMap(sizeof(*CurrentFile));
941 if (unlikely(idxFile == 0))
942 return false;
943 CurrentFile = Cache.PkgFileP + idxFile;
944
945 // Fill it in
946 map_ptrloc const idxFileName = WriteStringInMap(File);
947 map_ptrloc const idxSite = WriteUniqString(Site);
948 if (unlikely(idxFileName == 0 || idxSite == 0))
949 return false;
950 CurrentFile->FileName = idxFileName;
951 CurrentFile->Site = idxSite;
952 CurrentFile->NextFile = Cache.HeaderP->FileList;
953 CurrentFile->Flags = Flags;
954 CurrentFile->ID = Cache.HeaderP->PackageFileCount;
955 map_ptrloc const idxIndexType = WriteUniqString(Index.GetType()->Label);
956 if (unlikely(idxIndexType == 0))
957 return false;
958 CurrentFile->IndexType = idxIndexType;
959 PkgFileName = File;
960 Cache.HeaderP->FileList = CurrentFile - Cache.PkgFileP;
961 Cache.HeaderP->PackageFileCount++;
962
963 if (Progress != 0)
964 Progress->SubProgress(Index.Size());
965 return true;
966 }
967 /*}}}*/
968 // CacheGenerator::WriteUniqueString - Insert a unique string /*{{{*/
969 // ---------------------------------------------------------------------
970 /* This is used to create handles to strings. Given the same text it
971 always returns the same number */
972 unsigned long pkgCacheGenerator::WriteUniqString(const char *S,
973 unsigned int Size)
974 {
975 /* We use a very small transient hash table here; this speeds up generation
976 by a fair amount on slower machines */
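/* The bucket is picked from just the first two characters of the string, so
   it only acts as a cheap cache in front of the sorted StringItem list that
   is walked below whenever the bucket does not match. */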
977 pkgCache::StringItem *&Bucket = UniqHash[(S[0]*5 + S[1]) % _count(UniqHash)];
978 if (Bucket != 0 &&
979 stringcmp(S,S+Size,Cache.StrP + Bucket->String) == 0)
980 return Bucket->String;
981
982 // Search for an insertion point
983 pkgCache::StringItem *I = Cache.StringItemP + Cache.HeaderP->StringList;
984 int Res = 1;
985 map_ptrloc *Last = &Cache.HeaderP->StringList;
986 for (; I != Cache.StringItemP; Last = &I->NextItem,
987 I = Cache.StringItemP + I->NextItem)
988 {
989 Res = stringcmp(S,S+Size,Cache.StrP + I->String);
990 if (Res >= 0)
991 break;
992 }
993
994 // Match
995 if (Res == 0)
996 {
997 Bucket = I;
998 return I->String;
999 }
1000
1001 // Get a structure
1002 void const * const oldMap = Map.Data();
1003 map_ptrloc const Item = AllocateInMap(sizeof(pkgCache::StringItem));
1004 if (Item == 0)
1005 return 0;
1006
1007 map_ptrloc const idxString = WriteStringInMap(S,Size);
1008 if (unlikely(idxString == 0))
1009 return 0;
1010 if (oldMap != Map.Data()) {
1011 Last += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
1012 I += (pkgCache::StringItem*) Map.Data() - (pkgCache::StringItem*) oldMap;
1013 }
1014 *Last = Item;
1015
1016 // Fill in the structure
1017 pkgCache::StringItem *ItemP = Cache.StringItemP + Item;
1018 ItemP->NextItem = I - Cache.StringItemP;
1019 ItemP->String = idxString;
1020
1021 Bucket = ItemP;
1022 return ItemP->String;
1023 }
1024 /*}}}*/
1025 // CheckValidity - Check that a cache is up-to-date /*{{{*/
1026 // ---------------------------------------------------------------------
1027 /* This just verifies that each file in the list of index files exists,
1028 matches the attributes recorded in the cache, and that the cache does
1029 not contain any extra files. */
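/* In detail the cache is only considered valid if all of the following hold
   (sketch of the checks below): the cache file exists and is at least as new
   as sources.list, it maps without pending errors, every index file that has
   packages is found in it via FindInCache (which also does the IMS check),
   and no package file recorded in the cache was left unvisited. */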
1030 static bool CheckValidity(const string &CacheFile,
1031 pkgSourceList &List,
1032 FileIterator Start,
1033 FileIterator End,
1034 MMap **OutMap = 0)
1035 {
1036 bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
1037 // No file, certainly invalid
1038 if (CacheFile.empty() == true || FileExists(CacheFile) == false)
1039 {
1040 if (Debug == true)
1041 std::clog << "CacheFile doesn't exist" << std::endl;
1042 return false;
1043 }
1044
1045 if (List.GetLastModifiedTime() > GetModificationTime(CacheFile))
1046 {
1047 if (Debug == true)
1048 std::clog << "sources.list is newer than the cache" << std::endl;
1049 return false;
1050 }
1051
1052 // Map it
1053 FileFd CacheF(CacheFile,FileFd::ReadOnly);
1054 SPtr<MMap> Map = new MMap(CacheF,0);
1055 pkgCache Cache(Map);
1056 if (_error->PendingError() == true || Map->Size() == 0)
1057 {
1058 if (Debug == true)
1059 std::clog << "Errors are pending or Map is empty()" << std::endl;
1060 _error->Discard();
1061 return false;
1062 }
1063
1064 /* Now we check every index file, see if it is in the cache,
1065 verify the IMS data and check that it is on the disk too. */
1066 SPtrArray<bool> Visited = new bool[Cache.HeaderP->PackageFileCount];
1067 memset(Visited,0,sizeof(*Visited)*Cache.HeaderP->PackageFileCount);
1068 for (; Start != End; ++Start)
1069 {
1070 if (Debug == true)
1071 std::clog << "Checking PkgFile " << (*Start)->Describe() << ": ";
1072 if ((*Start)->HasPackages() == false)
1073 {
1074 if (Debug == true)
1075 std::clog << "Has NO packages" << std::endl;
1076 continue;
1077 }
1078
1079 if ((*Start)->Exists() == false)
1080 {
1081 #if 0 // mvo: we no longer give a message here (Default Sources spec)
1082 _error->WarningE("stat",_("Couldn't stat source package list %s"),
1083 (*Start)->Describe().c_str());
1084 #endif
1085 if (Debug == true)
1086 std::clog << "file doesn't exist" << std::endl;
1087 continue;
1088 }
1089
1090 // FindInCache is also expected to do an IMS check.
1091 pkgCache::PkgFileIterator File = (*Start)->FindInCache(Cache);
1092 if (File.end() == true)
1093 {
1094 if (Debug == true)
1095 std::clog << "FindInCache returned end-Pointer" << std::endl;
1096 return false;
1097 }
1098
1099 Visited[File->ID] = true;
1100 if (Debug == true)
1101 std::clog << "with ID " << File->ID << " is valid" << std::endl;
1102 }
1103
1104 for (unsigned I = 0; I != Cache.HeaderP->PackageFileCount; I++)
1105 if (Visited[I] == false)
1106 {
1107 if (Debug == true)
1108 std::clog << "File with ID " << I << " wasn't visited" << std::endl;
1109 return false;
1110 }
1111
1112 if (_error->PendingError() == true)
1113 {
1114 if (Debug == true)
1115 {
1116 std::clog << "Validity failed because of pending errors:" << std::endl;
1117 _error->DumpErrors();
1118 }
1119 _error->Discard();
1120 return false;
1121 }
1122
1123 if (OutMap != 0)
1124 *OutMap = Map.UnGuard();
1125 return true;
1126 }
1127 /*}}}*/
1128 // ComputeSize - Compute the total size of a bunch of files /*{{{*/
1129 // ---------------------------------------------------------------------
1130 /* Size is kind of an abstract notion that is only used for the progress
1131 meter */
1132 static unsigned long ComputeSize(FileIterator Start,FileIterator End)
1133 {
1134 unsigned long TotalSize = 0;
1135 for (; Start != End; ++Start)
1136 {
1137 if ((*Start)->HasPackages() == false)
1138 continue;
1139 TotalSize += (*Start)->Size();
1140 }
1141 return TotalSize;
1142 }
1143 /*}}}*/
1144 // BuildCache - Merge the list of index files into the cache /*{{{*/
1145 // ---------------------------------------------------------------------
1146 /* */
1147 static bool BuildCache(pkgCacheGenerator &Gen,
1148 OpProgress *Progress,
1149 unsigned long &CurrentSize,unsigned long TotalSize,
1150 FileIterator Start, FileIterator End)
1151 {
1152 FileIterator I;
1153 for (I = Start; I != End; ++I)
1154 {
1155 if ((*I)->HasPackages() == false)
1156 continue;
1157
1158 if ((*I)->Exists() == false)
1159 continue;
1160
1161 if ((*I)->FindInCache(Gen.GetCache()).end() == false)
1162 {
1163 _error->Warning("Duplicate sources.list entry %s",
1164 (*I)->Describe().c_str());
1165 continue;
1166 }
1167
1168 unsigned long Size = (*I)->Size();
1169 if (Progress != NULL)
1170 Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Reading package lists"));
1171 CurrentSize += Size;
1172
1173 if ((*I)->Merge(Gen,Progress) == false)
1174 return false;
1175 }
1176
1177 if (Gen.HasFileDeps() == true)
1178 {
1179 if (Progress != NULL)
1180 Progress->Done();
1181 TotalSize = ComputeSize(Start, End);
1182 CurrentSize = 0;
1183 for (I = Start; I != End; ++I)
1184 {
1185 unsigned long Size = (*I)->Size();
1186 if (Progress != NULL)
1187 Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Collecting File Provides"));
1188 CurrentSize += Size;
1189 if ((*I)->MergeFileProvides(Gen,Progress) == false)
1190 return false;
1191 }
1192 }
1193
1194 return true;
1195 }
1196 /*}}}*/
1197 // CacheGenerator::CreateDynamicMMap - load an mmap with configuration options /*{{{*/
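/* The map geometry can be tuned via configuration; the values shown are the
   defaults applied below (illustrative apt.conf snippet):
     APT::Cache-Start  "25165824";   // initial size: 24*1024*1024 bytes
     APT::Cache-Grow    "1048576";   // grow step:     1*1024*1024 bytes
     APT::Cache-Limit         "0";   // passed to DynamicMMap as the size limit
     APT::Cache-Fallback  "false";   // sets MMap::Fallback in addition to Moveable
*/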
1198 DynamicMMap* pkgCacheGenerator::CreateDynamicMMap(FileFd *CacheF, unsigned long Flags) {
1199 unsigned long const MapStart = _config->FindI("APT::Cache-Start", 24*1024*1024);
1200 unsigned long const MapGrow = _config->FindI("APT::Cache-Grow", 1*1024*1024);
1201 unsigned long const MapLimit = _config->FindI("APT::Cache-Limit", 0);
1202 Flags |= MMap::Moveable;
1203 if (_config->FindB("APT::Cache-Fallback", false) == true)
1204 Flags |= MMap::Fallback;
1205 if (CacheF != NULL)
1206 return new DynamicMMap(*CacheF, Flags, MapStart, MapGrow, MapLimit);
1207 else
1208 return new DynamicMMap(Flags, MapStart, MapGrow, MapLimit);
1209 }
1210 /*}}}*/
1211 // CacheGenerator::MakeStatusCache - Construct the status cache /*{{{*/
1212 // ---------------------------------------------------------------------
1213 /* This makes sure that the status cache (the cache that has all
1214 index files from the sources list and all local ones) is ready
1215 to be mmapped. If OutMap is not zero then a MMap object representing
1216 the cache will be stored there. This is pretty much mandatory if you
1217 are using AllowMem. AllowMem lets the function be run as non-root
1218 where it builds the cache 'fast' into a memory buffer. */
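/* Minimal caller sketch (assumed usage; the caller-side names are not part
   of this file):
     pkgSourceList List;
     if (List.ReadMainList() == false)
        return false;
     OpProgress Prog;
     MMap *Map = NULL;
     if (pkgCacheGenerator::MakeStatusCache(List, &Prog, &Map, true) == false)
        return false;
     pkgCache Cache(Map);   // ready to use; the caller owns Map
*/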
1219 __deprecated bool pkgMakeStatusCache(pkgSourceList &List,OpProgress &Progress,
1220 MMap **OutMap, bool AllowMem)
1221 { return pkgCacheGenerator::MakeStatusCache(List, &Progress, OutMap, AllowMem); }
1222 bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress,
1223 MMap **OutMap,bool AllowMem)
1224 {
1225 bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
1226
1227 std::vector<pkgIndexFile *> Files;
1228 for (std::vector<metaIndex *>::const_iterator i = List.begin();
1229 i != List.end();
1230 ++i)
1231 {
1232 std::vector <pkgIndexFile *> *Indexes = (*i)->GetIndexFiles();
1233 for (std::vector<pkgIndexFile *>::const_iterator j = Indexes->begin();
1234 j != Indexes->end();
1235 ++j)
1236 Files.push_back (*j);
1237 }
1238
1239 unsigned long const EndOfSource = Files.size();
1240 if (_system->AddStatusFiles(Files) == false)
1241 return false;
1242
1243 // Decide if we can write to the files..
1244 string const CacheFile = _config->FindFile("Dir::Cache::pkgcache");
1245 string const SrcCacheFile = _config->FindFile("Dir::Cache::srcpkgcache");
1246
1247 // ensure the cache directory exists
1248 if (CacheFile.empty() == false || SrcCacheFile.empty() == false)
1249 {
1250 string dir = _config->FindDir("Dir::Cache");
1251 size_t const len = dir.size();
1252 if (len > 5 && dir.find("/apt/", len - 6, 5) == len - 5)
1253 dir = dir.substr(0, len - 5);
1254 if (CacheFile.empty() == false)
1255 CreateDirectory(dir, flNotFile(CacheFile));
1256 if (SrcCacheFile.empty() == false)
1257 CreateDirectory(dir, flNotFile(SrcCacheFile));
1258 }
1259
1260 // Decide if we can write to the cache
1261 bool Writeable = false;
1262 if (CacheFile.empty() == false)
1263 Writeable = access(flNotFile(CacheFile).c_str(),W_OK) == 0;
1264 else
1265 if (SrcCacheFile.empty() == false)
1266 Writeable = access(flNotFile(SrcCacheFile).c_str(),W_OK) == 0;
1267 if (Debug == true)
1268 std::clog << "Do we have write-access to the cache files? " << (Writeable ? "YES" : "NO") << std::endl;
1269
1270 if (Writeable == false && AllowMem == false && CacheFile.empty() == false)
1271 return _error->Error(_("Unable to write to %s"),flNotFile(CacheFile).c_str());
1272
1273 if (Progress != NULL)
1274 Progress->OverallProgress(0,1,1,_("Reading package lists"));
1275
1276 // Cache is OK, Fin.
1277 if (CheckValidity(CacheFile, List, Files.begin(),Files.end(),OutMap) == true)
1278 {
1279 if (Progress != NULL)
1280 Progress->OverallProgress(1,1,1,_("Reading package lists"));
1281 if (Debug == true)
1282 std::clog << "pkgcache.bin is valid - no need to build anything" << std::endl;
1283 return true;
1284 }
1285 else if (Debug == true)
1286 std::clog << "pkgcache.bin is NOT valid" << std::endl;
1287
1288 /* At this point we know we need to reconstruct the package cache,
1289 begin. */
1290 SPtr<FileFd> CacheF;
1291 SPtr<DynamicMMap> Map;
1292 if (Writeable == true && CacheFile.empty() == false)
1293 {
1294 _error->PushToStack();
1295 unlink(CacheFile.c_str());
1296 CacheF = new FileFd(CacheFile,FileFd::WriteAtomic);
1297 fchmod(CacheF->Fd(),0644);
1298 Map = CreateDynamicMMap(CacheF, MMap::Public);
1299 if (_error->PendingError() == true)
1300 {
1301 delete CacheF.UnGuard();
1302 delete Map.UnGuard();
1303 if (Debug == true)
1304 std::clog << "Open filebased MMap FAILED" << std::endl;
1305 Writeable = false;
1306 if (AllowMem == false)
1307 {
1308 _error->MergeWithStack();
1309 return false;
1310 }
1311 _error->RevertToStack();
1312 }
1313 else if (Debug == true)
1314 {
1315 _error->MergeWithStack();
1316 std::clog << "Open filebased MMap" << std::endl;
1317 }
1318 }
1319 if (Writeable == false || CacheFile.empty() == true)
1320 {
1321 // Just build it in memory..
1322 Map = CreateDynamicMMap(NULL);
1323 if (Debug == true)
1324 std::clog << "Open memory Map (not filebased)" << std::endl;
1325 }
1326
1327 // Let's try the source cache.
1328 unsigned long CurrentSize = 0;
1329 unsigned long TotalSize = 0;
1330 if (CheckValidity(SrcCacheFile, List, Files.begin(),
1331 Files.begin()+EndOfSource) == true)
1332 {
1333 if (Debug == true)
1334 std::clog << "srcpkgcache.bin is valid - populate MMap with it." << std::endl;
1335 // Preload the map with the source cache
1336 FileFd SCacheF(SrcCacheFile,FileFd::ReadOnly);
1337 unsigned long const alloc = Map->RawAllocate(SCacheF.Size());
1338 if ((alloc == 0 && _error->PendingError())
1339 || SCacheF.Read((unsigned char *)Map->Data() + alloc,
1340 SCacheF.Size()) == false)
1341 return false;
1342
1343 TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());
1344
1345 // Build the status cache
1346 pkgCacheGenerator Gen(Map.Get(),Progress);
1347 if (_error->PendingError() == true)
1348 return false;
1349 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1350 Files.begin()+EndOfSource,Files.end()) == false)
1351 return false;
1352 }
1353 else
1354 {
1355 if (Debug == true)
1356 std::clog << "srcpkgcache.bin is NOT valid - rebuild" << std::endl;
1357 TotalSize = ComputeSize(Files.begin(),Files.end());
1358
1359 // Build the source cache
1360 pkgCacheGenerator Gen(Map.Get(),Progress);
1361 if (_error->PendingError() == true)
1362 return false;
1363 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1364 Files.begin(),Files.begin()+EndOfSource) == false)
1365 return false;
1366
1367 // Write it back
1368 if (Writeable == true && SrcCacheFile.empty() == false)
1369 {
1370 FileFd SCacheF(SrcCacheFile,FileFd::WriteAtomic);
1371 if (_error->PendingError() == true)
1372 return false;
1373
1374 fchmod(SCacheF.Fd(),0644);
1375
1376 // Write out the main data
1377 if (SCacheF.Write(Map->Data(),Map->Size()) == false)
1378 return _error->Error(_("IO Error saving source cache"));
1379 SCacheF.Sync();
1380
1381 // Write out the proper header
1382 Gen.GetCache().HeaderP->Dirty = false;
1383 if (SCacheF.Seek(0) == false ||
1384 SCacheF.Write(Map->Data(),sizeof(*Gen.GetCache().HeaderP)) == false)
1385 return _error->Error(_("IO Error saving source cache"));
1386 Gen.GetCache().HeaderP->Dirty = true;
1387 SCacheF.Sync();
1388 }
1389
1390 // Build the status cache
1391 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1392 Files.begin()+EndOfSource,Files.end()) == false)
1393 return false;
1394 }
1395 if (Debug == true)
1396 std::clog << "Caches are ready for shipping" << std::endl;
1397
1398 if (_error->PendingError() == true)
1399 return false;
1400 if (OutMap != 0)
1401 {
1402 if (CacheF != 0)
1403 {
1404 delete Map.UnGuard();
1405 *OutMap = new MMap(*CacheF,0);
1406 }
1407 else
1408 {
1409 *OutMap = Map.UnGuard();
1410 }
1411 }
1412
1413 return true;
1414 }
1415 /*}}}*/
1416 // CacheGenerator::MakeOnlyStatusCache - Build only a status files cache/*{{{*/
1417 // ---------------------------------------------------------------------
1418 /* */
1419 __deprecated bool pkgMakeOnlyStatusCache(OpProgress &Progress,DynamicMMap **OutMap)
1420 { return pkgCacheGenerator::MakeOnlyStatusCache(&Progress, OutMap); }
1421 bool pkgCacheGenerator::MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **OutMap)
1422 {
1423 std::vector<pkgIndexFile *> Files;
1424 unsigned long EndOfSource = Files.size();
1425 if (_system->AddStatusFiles(Files) == false)
1426 return false;
1427
1428 SPtr<DynamicMMap> Map = CreateDynamicMMap(NULL);
1429 unsigned long CurrentSize = 0;
1430 unsigned long TotalSize = 0;
1431
1432 TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());
1433
1434 // Build the status cache
1435 if (Progress != NULL)
1436 Progress->OverallProgress(0,1,1,_("Reading package lists"));
1437 pkgCacheGenerator Gen(Map.Get(),Progress);
1438 if (_error->PendingError() == true)
1439 return false;
1440 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1441 Files.begin()+EndOfSource,Files.end()) == false)
1442 return false;
1443
1444 if (_error->PendingError() == true)
1445 return false;
1446 *OutMap = Map.UnGuard();
1447
1448 return true;
1449 }
1450 /*}}}*/
1451 // IsDuplicateDescription /*{{{*/
1452 bool IsDuplicateDescription(pkgCache::DescIterator Desc,
1453 MD5SumValue const &CurMd5, std::string const &CurLang)
1454 {
1455 // Descriptions in the same linked list all have the same md5
1456 if (MD5SumValue(Desc.md5()) != CurMd5)
1457 return false;
1458 for (; Desc.end() == false; ++Desc)
1459 if (Desc.LanguageCode() == CurLang)
1460 return true;
1461 return false;
1462 }
1463 /*}}}*/
1464 // CacheGenerator::FinishCache /*{{{*/
1465 bool pkgCacheGenerator::FinishCache(OpProgress *Progress)
1466 {
1467 return true;
1468 }
1469 /*}}}*/