cleanup headers and especially #includes everywhere
[ntk/apt.git] / apt-pkg / pkgcachegen.cc
1 // -*- mode: cpp; mode: fold -*-
2 // Description /*{{{*/
3 // $Id: pkgcachegen.cc,v 1.53.2.1 2003/12/24 23:09:17 mdz Exp $
4 /* ######################################################################
5
6 Package Cache Generator - Generator for the cache structure.
7
8 This builds the cache structure from the abstract package list parser.
9
10 ##################################################################### */
11 /*}}}*/
12 // Include Files /*{{{*/
13 #include <config.h>
14
15 #include <apt-pkg/pkgcachegen.h>
16 #include <apt-pkg/error.h>
17 #include <apt-pkg/version.h>
18 #include <apt-pkg/progress.h>
19 #include <apt-pkg/sourcelist.h>
20 #include <apt-pkg/configuration.h>
21 #include <apt-pkg/strutl.h>
22 #include <apt-pkg/sptr.h>
23 #include <apt-pkg/pkgsystem.h>
24 #include <apt-pkg/macros.h>
25 #include <apt-pkg/metaindex.h>
26 #include <apt-pkg/fileutl.h>
27 #include <apt-pkg/hashsum_template.h>
28 #include <apt-pkg/indexfile.h>
29 #include <apt-pkg/md5.h>
30 #include <apt-pkg/mmap.h>
31 #include <apt-pkg/pkgcache.h>
32 #include <apt-pkg/cacheiterators.h>
33
34 #include <stddef.h>
35 #include <string.h>
36 #include <iostream>
37 #include <string>
38 #include <vector>
39 #include <sys/stat.h>
40 #include <unistd.h>
41
42 #include <apti18n.h>
43 /*}}}*/
// Iterator shorthand used when walking the list of index files to merge.
typedef std::vector<pkgIndexFile *>::iterator FileIterator;
// Out-of-line definition of the per-iterator-type registry that Dynamic<>
// uses to fix up live iterators whenever the cache mmap is relocated.
template <typename Iter> std::vector<Iter*> pkgCacheGenerator::Dynamic<Iter>::toReMap;

// Returns true if a description with the given md5 AND language already
// exists in the description list starting at Desc (defined below).
static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
			MD5SumValue const &CurMd5, std::string const &CurLang);

using std::string;
51
// CacheGenerator::pkgCacheGenerator - Constructor			/*{{{*/
// ---------------------------------------------------------------------
/* We set the dirty flag and make sure that is written to the disk */
pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) :
		    Map(*pMap), Cache(pMap,false), Progress(Prog),
		    FoundFileDeps(0)
{
   CurrentFile = 0;
   memset(UniqHash,0,sizeof(UniqHash));

   if (_error->PendingError() == true)
      return;

   if (Map.Size() == 0)
   {
      // Setup the map interface..
      Cache.HeaderP = (pkgCache::Header *)Map.Data();
      if (Map.RawAllocate(sizeof(pkgCache::Header)) == 0 && _error->PendingError() == true)
	 return;

      Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));

      // Starting header
      *Cache.HeaderP = pkgCache::Header();
      // the write can grow (and thereby move) the map, so store the index
      // in a local first and only then poke it into the header
      map_ptrloc const idxVerSysName = WriteStringInMap(_system->VS->Label);
      Cache.HeaderP->VerSysName = idxVerSysName;
      // this pointer is set in ReMap, but we need it now for WriteUniqString
      Cache.StringItemP = (pkgCache::StringItem *)Map.Data();
      map_ptrloc const idxArchitecture = WriteUniqString(_config->Find("APT::Architecture"));
      Cache.HeaderP->Architecture = idxArchitecture;
      // index 0 signals a failed write; leave the error pending for the caller
      if (unlikely(idxVerSysName == 0 || idxArchitecture == 0))
	 return;
      Cache.ReMap();
   }
   else
   {
      // Map directly from the existing file
      Cache.ReMap();
      Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
      if (Cache.VS != _system->VS)
      {
	 _error->Error(_("Cache has an incompatible versioning system"));
	 return;
      }
   }

   // mark the cache as under construction; the destructor clears this flag
   // again, so a crash in between leaves a recognizably incomplete cache
   Cache.HeaderP->Dirty = true;
   Map.Sync(0,sizeof(pkgCache::Header));
}
									/*}}}*/
102 // CacheGenerator::~pkgCacheGenerator - Destructor /*{{{*/
103 // ---------------------------------------------------------------------
104 /* We sync the data then unset the dirty flag in two steps so as to
105 advoid a problem during a crash */
106 pkgCacheGenerator::~pkgCacheGenerator()
107 {
108 if (_error->PendingError() == true)
109 return;
110 if (Map.Sync() == false)
111 return;
112
113 Cache.HeaderP->Dirty = false;
114 Cache.HeaderP->CacheFileSize = Map.Size();
115 Map.Sync(0,sizeof(pkgCache::Header));
116 }
117 /*}}}*/
void pkgCacheGenerator::ReMap(void const * const oldMap, void const * const newMap) {/*{{{*/
   // The underlying mmap was grown and (possibly) relocated: every raw
   // pointer into the old mapping has to be rebased onto the new one.
   if (oldMap == newMap)
      return;

   if (_config->FindB("Debug::pkgCacheGen", false))
      std::clog << "Remaping from " << oldMap << " to " << newMap << std::endl;

   // let the cache wrapper recompute its structure pointers (no reset)
   Cache.ReMap(false);

   // rebase our own cached raw pointers by the distance the map moved
   CurrentFile += (pkgCache::PackageFile const * const) newMap - (pkgCache::PackageFile const * const) oldMap;

   for (size_t i = 0; i < _count(UniqHash); ++i)
      if (UniqHash[i] != 0)
	 UniqHash[i] += (pkgCache::StringItem const * const) newMap - (pkgCache::StringItem const * const) oldMap;

   // every iterator registered via Dynamic<> is still live on some caller's
   // stack, so each of them has to be fixed up as well
   for (std::vector<pkgCache::GrpIterator*>::const_iterator i = Dynamic<pkgCache::GrpIterator>::toReMap.begin();
	i != Dynamic<pkgCache::GrpIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::PkgIterator*>::const_iterator i = Dynamic<pkgCache::PkgIterator>::toReMap.begin();
	i != Dynamic<pkgCache::PkgIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::VerIterator*>::const_iterator i = Dynamic<pkgCache::VerIterator>::toReMap.begin();
	i != Dynamic<pkgCache::VerIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::DepIterator*>::const_iterator i = Dynamic<pkgCache::DepIterator>::toReMap.begin();
	i != Dynamic<pkgCache::DepIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::DescIterator*>::const_iterator i = Dynamic<pkgCache::DescIterator>::toReMap.begin();
	i != Dynamic<pkgCache::DescIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::PrvIterator*>::const_iterator i = Dynamic<pkgCache::PrvIterator>::toReMap.begin();
	i != Dynamic<pkgCache::PrvIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::PkgFileIterator*>::const_iterator i = Dynamic<pkgCache::PkgFileIterator>::toReMap.begin();
	i != Dynamic<pkgCache::PkgFileIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
}									/*}}}*/
155 // CacheGenerator::WriteStringInMap /*{{{*/
156 map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String,
157 const unsigned long &Len) {
158 void const * const oldMap = Map.Data();
159 map_ptrloc const index = Map.WriteString(String, Len);
160 if (index != 0)
161 ReMap(oldMap, Map.Data());
162 return index;
163 }
164 /*}}}*/
165 // CacheGenerator::WriteStringInMap /*{{{*/
166 map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String) {
167 void const * const oldMap = Map.Data();
168 map_ptrloc const index = Map.WriteString(String);
169 if (index != 0)
170 ReMap(oldMap, Map.Data());
171 return index;
172 }
173 /*}}}*/
174 map_ptrloc pkgCacheGenerator::AllocateInMap(const unsigned long &size) {/*{{{*/
175 void const * const oldMap = Map.Data();
176 map_ptrloc const index = Map.Allocate(size);
177 if (index != 0)
178 ReMap(oldMap, Map.Data());
179 return index;
180 }
181 /*}}}*/
// CacheGenerator::MergeList - Merge the package list			/*{{{*/
// ---------------------------------------------------------------------
/* This provides the generation of the entries in the cache. Each loop
   goes through a single package record from the underlying parse engine. */
bool pkgCacheGenerator::MergeList(ListParser &List,
				  pkgCache::VerIterator *OutVer)
{
   List.Owner = this;

   unsigned int Counter = 0;
   while (List.Step() == true)
   {
      string const PackageName = List.Package();
      if (PackageName.empty() == true)
	 return false;

      Counter++;
      if (Counter % 100 == 0 && Progress != 0)
	 Progress->Progress(List.Offset());

      string Arch = List.Architecture();
      string const Version = List.Version();
      // a stanza without version and architecture carries only
      // descriptions (e.g. from Translation- files): handle group-wide
      if (Version.empty() == true && Arch.empty() == true)
      {
	 // package descriptions
	 if (MergeListGroup(List, PackageName) == false)
	    return false;
	 continue;
      }

      if (Arch.empty() == true)
      {
	 // use the pseudo arch 'none' for arch-less packages
	 Arch = "none";
	 /* We might built a SingleArchCache here, which we don't want to blow up
	    just for these :none packages to a proper MultiArchCache, so just ensure
	    that we have always a native package structure first for SingleArch */
	 pkgCache::PkgIterator NP;
	 Dynamic<pkgCache::PkgIterator> DynPkg(NP);
	 if (NewPackage(NP, PackageName, _config->Find("APT::Architecture")) == false)
	    // TRANSLATOR: The first placeholder is a package name,
	    // the other two should be copied verbatim as they include debug info
	    return _error->Error(_("Error occurred while processing %s (%s%d)"),
				 PackageName.c_str(), "NewPackage", 0);
      }

      // Get a pointer to the package structure
      pkgCache::PkgIterator Pkg;
      Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
      if (NewPackage(Pkg, PackageName, Arch) == false)
	 // TRANSLATOR: The first placeholder is a package name,
	 // the other two should be copied verbatim as they include debug info
	 return _error->Error(_("Error occurred while processing %s (%s%d)"),
			      PackageName.c_str(), "NewPackage", 1);


      // stanzas without a version (but with an arch) only update the
      // package/description data, everything else adds a version
      if (Version.empty() == true)
      {
	 if (MergeListPackage(List, Pkg) == false)
	    return false;
      }
      else
      {
	 if (MergeListVersion(List, Pkg, Version, OutVer) == false)
	    return false;
      }

      // single-record mode: the caller only wanted this one version
      if (OutVer != 0)
      {
	 FoundFileDeps |= List.HasFileDeps();
	 return true;
      }
   }

   // the ID fields in the cache structures are fixed-width,
   // so refuse to silently overflow them
   if (Cache.HeaderP->PackageCount >= (1ULL<<sizeof(Cache.PkgP->ID)*8)-1)
      return _error->Error(_("Wow, you exceeded the number of package "
			     "names this APT is capable of."));
   if (Cache.HeaderP->VersionCount >= (1ULL<<(sizeof(Cache.VerP->ID)*8))-1)
      return _error->Error(_("Wow, you exceeded the number of versions "
			     "this APT is capable of."));
   if (Cache.HeaderP->DescriptionCount >= (1ULL<<(sizeof(Cache.DescP->ID)*8))-1)
      return _error->Error(_("Wow, you exceeded the number of descriptions "
			     "this APT is capable of."));
   if (Cache.HeaderP->DependsCount >= (1ULL<<(sizeof(Cache.DepP->ID)*8))-1ULL)
      return _error->Error(_("Wow, you exceeded the number of dependencies "
			     "this APT is capable of."));

   FoundFileDeps |= List.HasFileDeps();
   return true;
}
// CacheGenerator::MergeListGroup					/*{{{*/
// Apply a version-less, arch-less stanza (description data) to every
// package that already exists in the named group.
bool pkgCacheGenerator::MergeListGroup(ListParser &List, std::string const &GrpName)
{
   pkgCache::GrpIterator Grp = Cache.FindGrp(GrpName);
   // a group has no data on it's own, only packages have it but these
   // stanzas like this come from Translation- files to add descriptions,
   // but without a version we don't need a description for it…
   if (Grp.end() == true)
      return true;
   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);

   pkgCache::PkgIterator Pkg;
   Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
   for (Pkg = Grp.PackageList(); Pkg.end() == false; Pkg = Grp.NextPkg(Pkg))
      if (MergeListPackage(List, Pkg) == false)
	 return false;

   return true;
}
									/*}}}*/
// CacheGenerator::MergeListPackage					/*{{{*/
// Merge a stanza that carries no version: update package-level data and
// attach the stanza's description to every version with a matching md5.
bool pkgCacheGenerator::MergeListPackage(ListParser &List, pkgCache::PkgIterator &Pkg)
{
   // we first process the package, then the descriptions
   // (for deb this package processing is in fact a no-op)
   pkgCache::VerIterator Ver(Cache);
   Dynamic<pkgCache::VerIterator> DynVer(Ver);
   if (List.UsePackage(Pkg, Ver) == false)
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "UsePackage", 1);

   // Find the right version to write the description
   MD5SumValue CurMd5 = List.Description_md5();
   if (CurMd5.Value().empty() == true || List.Description().empty() == true)
      return true;
   std::string CurLang = List.DescriptionLanguage();

   for (Ver = Pkg.VersionList(); Ver.end() == false; ++Ver)
   {
      pkgCache::DescIterator VerDesc = Ver.DescriptionList();

      // a version can only have one md5 describing it
      if (VerDesc.end() == true || MD5SumValue(VerDesc.md5()) != CurMd5)
	 continue;

      // don't add a new description if we have one for the given
      // md5 && language
      if (IsDuplicateDescription(VerDesc, CurMd5, CurLang) == true)
	 continue;

      pkgCache::DescIterator Desc;
      Dynamic<pkgCache::DescIterator> DynDesc(Desc);

      // reuse the stored md5 string of the existing description group
      map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, VerDesc->md5sum);
      if (unlikely(descindex == 0 && _error->PendingError()))
	 return _error->Error(_("Error occurred while processing %s (%s%d)"),
			      Pkg.Name(), "NewDescription", 1);

      Desc->ParentPkg = Pkg.Index();

      // we add at the end, so that the start is constant as we need
      // that to be able to efficiently share these lists
      VerDesc = Ver.DescriptionList(); // old value might be invalid after ReMap
      for (;VerDesc.end() == false && VerDesc->NextDesc != 0; ++VerDesc);
      map_ptrloc * const LastNextDesc = (VerDesc.end() == true) ? &Ver->DescriptionList : &VerDesc->NextDesc;
      *LastNextDesc = descindex;

      if (NewFileDesc(Desc,List) == false)
	 return _error->Error(_("Error occurred while processing %s (%s%d)"),
			      Pkg.Name(), "NewFileDesc", 1);

      // we can stop here as all "same" versions will share the description
      break;
   }

   return true;
}
									/*}}}*/
// CacheGenerator::MergeListVersion					/*{{{*/
// Merge a stanza that carries a version: find or create the version
// structure, wire up implicit Multi-Arch dependencies and descriptions.
bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator &Pkg,
					 std::string const &Version, pkgCache::VerIterator* &OutVer)
{
   pkgCache::VerIterator Ver = Pkg.VersionList();
   Dynamic<pkgCache::VerIterator> DynVer(Ver);
   map_ptrloc *LastVer = &Pkg->VersionList;
   void const * oldMap = Map.Data();

   unsigned long const Hash = List.VersionHash();
   if (Ver.end() == false)
   {
      /* We know the list is sorted so we use that fact in the search.
         Insertion of new versions is done with correct sorting */
      int Res = 1;
      for (; Ver.end() == false; LastVer = &Ver->NextVer, Ver++)
      {
	 Res = Cache.VS->CmpVersion(Version,Ver.VerStr());
	 // Version is higher as current version - insert here
	 if (Res > 0)
	    break;
	 // Versionstrings are equal - is hash also equal?
	 if (Res == 0 && Ver->Hash == Hash)
	    break;
	 // proceed with the next till we have either the right
	 // or we found another version (which will be lower)
      }

      /* We already have a version for this item, record that we saw it */
      if (Res == 0 && Ver.end() == false && Ver->Hash == Hash)
      {
	 if (List.UsePackage(Pkg,Ver) == false)
	    return _error->Error(_("Error occurred while processing %s (%s%d)"),
				 Pkg.Name(), "UsePackage", 2);

	 if (NewFileVer(Ver,List) == false)
	    return _error->Error(_("Error occurred while processing %s (%s%d)"),
				 Pkg.Name(), "NewFileVer", 1);

	 // Read only a single record and return
	 if (OutVer != 0)
	 {
	    *OutVer = Ver;
	    return true;
	 }

	 return true;
      }
   }

   // Add a new version
   map_ptrloc const verindex = NewVersion(Ver, Version, Pkg.Index(), Hash, *LastVer);
   if (verindex == 0 && _error->PendingError())
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewVersion", 1);

   // NewVersion may have grown and moved the map: LastVer points into the
   // map, so rebase it by the distance the map moved before writing through
   if (oldMap != Map.Data())
      LastVer += (map_ptrloc const * const) Map.Data() - (map_ptrloc const * const) oldMap;
   *LastVer = verindex;

   if (unlikely(List.NewVersion(Ver) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewVersion", 2);

   if (unlikely(List.UsePackage(Pkg,Ver) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "UsePackage", 3);

   if (unlikely(NewFileVer(Ver,List) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewFileVer", 2);

   pkgCache::GrpIterator Grp = Pkg.Group();
   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);

   /* If it is the first version of this package we need to add implicit
      Multi-Arch dependencies to all other package versions in the group now -
      otherwise we just add them for this new version */
   if (Pkg.VersionList()->NextVer == 0)
   {
      pkgCache::PkgIterator P = Grp.PackageList();
      Dynamic<pkgCache::PkgIterator> DynP(P);
      for (; P.end() != true; P = Grp.NextPkg(P))
      {
	 if (P->ID == Pkg->ID)
	    continue;
	 pkgCache::VerIterator V = P.VersionList();
	 Dynamic<pkgCache::VerIterator> DynV(V);
	 for (; V.end() != true; ++V)
	    if (unlikely(AddImplicitDepends(V, Pkg) == false))
	       return _error->Error(_("Error occurred while processing %s (%s%d)"),
				    Pkg.Name(), "AddImplicitDepends", 1);
      }
      /* :none packages are packages without an architecture. They are forbidden by
	 debian-policy, so usually they will only be in (old) dpkg status files -
	 and dpkg will complain about them - and are pretty rare. We therefore do
	 usually not create conflicts while the parent is created, but only if a :none
	 package (= the target) appears. This creates incorrect dependencies on :none
	 for architecture-specific dependencies on the package we copy from, but we
	 will ignore this bug as architecture-specific dependencies are only allowed
	 in jessie and until then the :none packages should be extinct (hopefully).
	 In other words: This should work long enough to allow graceful removal of
	 these packages, it is not supposed to allow users to keep using them … */
      if (strcmp(Pkg.Arch(), "none") == 0)
      {
	 pkgCache::PkgIterator M = Grp.FindPreferredPkg();
	 if (M.end() == false && Pkg != M)
	 {
	    pkgCache::DepIterator D = M.RevDependsList();
	    Dynamic<pkgCache::DepIterator> DynD(D);
	    for (; D.end() == false; ++D)
	    {
	       // only mirror negative relations coming from other groups
	       if ((D->Type != pkgCache::Dep::Conflicts &&
		    D->Type != pkgCache::Dep::DpkgBreaks &&
		    D->Type != pkgCache::Dep::Replaces) ||
		   D.ParentPkg().Group() == Grp)
		  continue;

	       map_ptrloc *OldDepLast = NULL;
	       pkgCache::VerIterator ConVersion = D.ParentVer();
	       Dynamic<pkgCache::VerIterator> DynV(ConVersion);
	       // duplicate the Conflicts/Breaks/Replaces for :none arch
	       NewDepends(Pkg, ConVersion, D->Version,
			  D->CompareOp, D->Type, OldDepLast);
	    }
	 }
      }
   }
   if (unlikely(AddImplicitDepends(Grp, Pkg, Ver) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "AddImplicitDepends", 2);

   // Read only a single record and return
   if (OutVer != 0)
   {
      *OutVer = Ver;
      return true;
   }

   /* Record the Description (it is not translated) */
   MD5SumValue CurMd5 = List.Description_md5();
   if (CurMd5.Value().empty() == true || List.Description().empty() == true)
      return true;
   std::string CurLang = List.DescriptionLanguage();

   /* Before we add a new description we first search in the group for
      a version with a description of the same MD5 - if so we reuse this
      description group instead of creating our own for this version */
   for (pkgCache::PkgIterator P = Grp.PackageList();
	P.end() == false; P = Grp.NextPkg(P))
   {
      for (pkgCache::VerIterator V = P.VersionList();
	   V.end() == false; ++V)
      {
	 if (IsDuplicateDescription(V.DescriptionList(), CurMd5, "") == false)
	    continue;
	 Ver->DescriptionList = V->DescriptionList;
	 return true;
      }
   }

   // We haven't found reusable descriptions, so add the first description
   pkgCache::DescIterator Desc = Ver.DescriptionList();
   Dynamic<pkgCache::DescIterator> DynDesc(Desc);

   map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, 0);
   if (unlikely(descindex == 0 && _error->PendingError()))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewDescription", 2);

   Desc->ParentPkg = Pkg.Index();
   Ver->DescriptionList = descindex;

   if (NewFileDesc(Desc,List) == false)
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewFileDesc", 2);

   return true;
}
									/*}}}*/
530 /*}}}*/
// CacheGenerator::MergeFileProvides - Merge file provides		/*{{{*/
// ---------------------------------------------------------------------
/* If we found any file depends while parsing the main list we need to
   resolve them. Since it is undesired to load the entire list of files
   into the cache as virtual packages we do a two stage effort. MergeList
   identifies the file depends and this creates Provides for them by
   re-parsing all the indexes. */
bool pkgCacheGenerator::MergeFileProvides(ListParser &List)
{
   List.Owner = this;

   unsigned int Counter = 0;
   while (List.Step() == true)
   {
      string PackageName = List.Package();
      if (PackageName.empty() == true)
	 return false;
      string Version = List.Version();
      // version-less stanzas cannot provide files, skip them
      if (Version.empty() == true)
	 continue;

      // the package must already exist from the MergeList pass
      pkgCache::PkgIterator Pkg = Cache.FindPkg(PackageName);
      Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
      if (Pkg.end() == true)
	 return _error->Error(_("Error occurred while processing %s (%s%d)"),
			      PackageName.c_str(), "FindPkg", 1);
      Counter++;
      if (Counter % 100 == 0 && Progress != 0)
	 Progress->Progress(List.Offset());

      // match the stanza to the exact version structure (hash AND string)
      unsigned long Hash = List.VersionHash();
      pkgCache::VerIterator Ver = Pkg.VersionList();
      Dynamic<pkgCache::VerIterator> DynVer(Ver);
      for (; Ver.end() == false; ++Ver)
      {
	 if (Ver->Hash == Hash && Version == Ver.VerStr())
	 {
	    if (List.CollectFileProvides(Cache,Ver) == false)
	       return _error->Error(_("Error occurred while processing %s (%s%d)"),
				    PackageName.c_str(), "CollectFileProvides", 1);
	    break;
	 }
      }

      // a missing version is suspicious but not fatal for this pass
      if (Ver.end() == true)
	 _error->Warning(_("Package %s %s was not found while processing file dependencies"),PackageName.c_str(),Version.c_str());
   }

   return true;
}
									/*}}}*/
582 // CacheGenerator::NewGroup - Add a new group /*{{{*/
583 // ---------------------------------------------------------------------
584 /* This creates a new group structure and adds it to the hash table */
585 bool pkgCacheGenerator::NewGroup(pkgCache::GrpIterator &Grp, const string &Name)
586 {
587 Grp = Cache.FindGrp(Name);
588 if (Grp.end() == false)
589 return true;
590
591 // Get a structure
592 map_ptrloc const Group = AllocateInMap(sizeof(pkgCache::Group));
593 if (unlikely(Group == 0))
594 return false;
595
596 Grp = pkgCache::GrpIterator(Cache, Cache.GrpP + Group);
597 map_ptrloc const idxName = WriteStringInMap(Name);
598 if (unlikely(idxName == 0))
599 return false;
600 Grp->Name = idxName;
601
602 // Insert it into the hash table
603 unsigned long const Hash = Cache.Hash(Name);
604 map_ptrloc *insertAt = &Cache.HeaderP->GrpHashTable[Hash];
605 while (*insertAt != 0 && strcasecmp(Name.c_str(), Cache.StrP + (Cache.GrpP + *insertAt)->Name) > 0)
606 insertAt = &(Cache.GrpP + *insertAt)->Next;
607 Grp->Next = *insertAt;
608 *insertAt = Group;
609
610 Grp->ID = Cache.HeaderP->GroupCount++;
611 return true;
612 }
613 /*}}}*/
// CacheGenerator::NewPackage - Add a new package			/*{{{*/
// ---------------------------------------------------------------------
/* This creates a new package structure and adds it to the hash table.
   Every package belongs to a group (all arches of one name); only the
   first package of a group is linked into the package hash table, the
   rest are chained behind it via NextPackage. */
bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name,
				   const string &Arch) {
   pkgCache::GrpIterator Grp;
   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
   if (unlikely(NewGroup(Grp, Name) == false))
      return false;

   Pkg = Grp.FindPkg(Arch);
   if (Pkg.end() == false)
      return true;

   // Get a structure
   map_ptrloc const Package = AllocateInMap(sizeof(pkgCache::Package));
   if (unlikely(Package == 0))
      return false;
   Pkg = pkgCache::PkgIterator(Cache,Cache.PkgP + Package);

   // Insert the package into our package list
   if (Grp->FirstPackage == 0) // the group is new
   {
      Grp->FirstPackage = Package;
      // Insert it into the hash table (bucket kept sorted by name)
      unsigned long const Hash = Cache.Hash(Name);
      map_ptrloc *insertAt = &Cache.HeaderP->PkgHashTable[Hash];
      while (*insertAt != 0 && strcasecmp(Name.c_str(), Cache.StrP + (Cache.PkgP + *insertAt)->Name) > 0)
	 insertAt = &(Cache.PkgP + *insertAt)->NextPackage;
      Pkg->NextPackage = *insertAt;
      *insertAt = Package;
   }
   else // Group the Packages together
   {
      // this package is the new last package
      pkgCache::PkgIterator LastPkg(Cache, Cache.PkgP + Grp->LastPackage);
      Pkg->NextPackage = LastPkg->NextPackage;
      LastPkg->NextPackage = Package;
   }
   Grp->LastPackage = Package;

   // Set the name, arch and the ID
   Pkg->Name = Grp->Name;
   Pkg->Group = Grp.Index();
   // all is mapped to the native architecture
   map_ptrloc const idxArch = (Arch == "all") ? Cache.HeaderP->Architecture : WriteUniqString(Arch.c_str());
   if (unlikely(idxArch == 0))
      return false;
   Pkg->Arch = idxArch;
   Pkg->ID = Cache.HeaderP->PackageCount++;

   return true;
}
									/*}}}*/
// CacheGenerator::AddImplicitDepends					/*{{{*/
// Add the implicit Multi-Arch relations of the new version V (of package
// P) against every other architecture sibling in the group G.
bool pkgCacheGenerator::AddImplicitDepends(pkgCache::GrpIterator &G,
					   pkgCache::PkgIterator &P,
					   pkgCache::VerIterator &V)
{
   // copy P.Arch() into a string here as a cache remap
   // in NewDepends() later may alter the pointer location
   string Arch = P.Arch() == NULL ? "" : P.Arch();
   map_ptrloc *OldDepLast = NULL;
   /* MultiArch handling introduces a lot of implicit Dependencies:
      - MultiArch: same → Co-Installable if they have the same version
      - All others conflict with all other group members */
   bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
   pkgCache::PkgIterator D = G.PackageList();
   Dynamic<pkgCache::PkgIterator> DynD(D);
   map_ptrloc const VerStrIdx = V->VerStr;
   for (; D.end() != true; D = G.NextPkg(D))
   {
      // skip ourselves and siblings without any version
      if (Arch == D.Arch() || D->VersionList == 0)
	 continue;
      /* We allow only one installed arch at the time
	 per group, therefore each group member conflicts
	 with all other group members */
      if (coInstall == true)
      {
	 // Replaces: ${self}:other ( << ${binary:Version})
	 NewDepends(D, V, VerStrIdx,
		    pkgCache::Dep::Less, pkgCache::Dep::Replaces,
		    OldDepLast);
	 // Breaks: ${self}:other (!= ${binary:Version})
	 NewDepends(D, V, VerStrIdx,
		    pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
		    OldDepLast);
      } else {
	 // Conflicts: ${self}:other
	 NewDepends(D, V, 0,
		    pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
		    OldDepLast);
      }
   }
   return true;
}
710 bool pkgCacheGenerator::AddImplicitDepends(pkgCache::VerIterator &V,
711 pkgCache::PkgIterator &D)
712 {
713 /* MultiArch handling introduces a lot of implicit Dependencies:
714 - MultiArch: same → Co-Installable if they have the same version
715 - All others conflict with all other group members */
716 map_ptrloc *OldDepLast = NULL;
717 bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
718 if (coInstall == true)
719 {
720 map_ptrloc const VerStrIdx = V->VerStr;
721 // Replaces: ${self}:other ( << ${binary:Version})
722 NewDepends(D, V, VerStrIdx,
723 pkgCache::Dep::Less, pkgCache::Dep::Replaces,
724 OldDepLast);
725 // Breaks: ${self}:other (!= ${binary:Version})
726 NewDepends(D, V, VerStrIdx,
727 pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
728 OldDepLast);
729 } else {
730 // Conflicts: ${self}:other
731 NewDepends(D, V, 0,
732 pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
733 OldDepLast);
734 }
735 return true;
736 }
737
738 /*}}}*/
739 // CacheGenerator::NewFileVer - Create a new File<->Version association /*{{{*/
740 // ---------------------------------------------------------------------
741 /* */
742 bool pkgCacheGenerator::NewFileVer(pkgCache::VerIterator &Ver,
743 ListParser &List)
744 {
745 if (CurrentFile == 0)
746 return true;
747
748 // Get a structure
749 map_ptrloc const VerFile = AllocateInMap(sizeof(pkgCache::VerFile));
750 if (VerFile == 0)
751 return 0;
752
753 pkgCache::VerFileIterator VF(Cache,Cache.VerFileP + VerFile);
754 VF->File = CurrentFile - Cache.PkgFileP;
755
756 // Link it to the end of the list
757 map_ptrloc *Last = &Ver->FileList;
758 for (pkgCache::VerFileIterator V = Ver.FileList(); V.end() == false; ++V)
759 Last = &V->NextFile;
760 VF->NextFile = *Last;
761 *Last = VF.Index();
762
763 VF->Offset = List.Offset();
764 VF->Size = List.Size();
765 if (Cache.HeaderP->MaxVerFileSize < VF->Size)
766 Cache.HeaderP->MaxVerFileSize = VF->Size;
767 Cache.HeaderP->VerFileCount++;
768
769 return true;
770 }
771 /*}}}*/
// CacheGenerator::NewVersion - Create a new Version			/*{{{*/
// ---------------------------------------------------------------------
/* This puts a version structure in the linked list. Returns the map
   index of the new version, or 0 on allocation/write failure. */
unsigned long pkgCacheGenerator::NewVersion(pkgCache::VerIterator &Ver,
					    const string &VerStr,
					    map_ptrloc const ParentPkg,
					    unsigned long const Hash,
					    unsigned long Next)
{
   // Get a structure
   map_ptrloc const Version = AllocateInMap(sizeof(pkgCache::Version));
   if (Version == 0)
      return 0;

   // Fill it in
   Ver = pkgCache::VerIterator(Cache,Cache.VerP + Version);
   //Dynamic<pkgCache::VerIterator> DynV(Ver); // caller MergeListVersion already takes care of it
   Ver->NextVer = Next;
   Ver->ParentPkg = ParentPkg;
   Ver->Hash = Hash;
   Ver->ID = Cache.HeaderP->VersionCount++;

   // try to find the version string in the group for reuse
   pkgCache::PkgIterator Pkg = Ver.ParentPkg();
   pkgCache::GrpIterator Grp = Pkg.Group();
   if (Pkg.end() == false && Grp.end() == false)
   {
      for (pkgCache::PkgIterator P = Grp.PackageList(); P.end() == false; P = Grp.NextPkg(P))
      {
	 if (Pkg == P)
	    continue;
	 for (pkgCache::VerIterator V = P.VersionList(); V.end() == false; ++V)
	 {
	    int const cmp = strcmp(V.VerStr(), VerStr.c_str());
	    if (cmp == 0)
	    {
	       // identical string already stored by a sibling: share it
	       Ver->VerStr = V->VerStr;
	       return Version;
	    }
	    else if (cmp < 0)
	       break;
	 }
      }
   }
   // haven't found the version string, so create
   map_ptrloc const idxVerStr = WriteStringInMap(VerStr);
   if (unlikely(idxVerStr == 0))
      return 0;
   Ver->VerStr = idxVerStr;
   return Version;
}
									/*}}}*/
824 // CacheGenerator::NewFileDesc - Create a new File<->Desc association /*{{{*/
825 // ---------------------------------------------------------------------
826 /* */
827 bool pkgCacheGenerator::NewFileDesc(pkgCache::DescIterator &Desc,
828 ListParser &List)
829 {
830 if (CurrentFile == 0)
831 return true;
832
833 // Get a structure
834 map_ptrloc const DescFile = AllocateInMap(sizeof(pkgCache::DescFile));
835 if (DescFile == 0)
836 return false;
837
838 pkgCache::DescFileIterator DF(Cache,Cache.DescFileP + DescFile);
839 DF->File = CurrentFile - Cache.PkgFileP;
840
841 // Link it to the end of the list
842 map_ptrloc *Last = &Desc->FileList;
843 for (pkgCache::DescFileIterator D = Desc.FileList(); D.end() == false; ++D)
844 Last = &D->NextFile;
845
846 DF->NextFile = *Last;
847 *Last = DF.Index();
848
849 DF->Offset = List.Offset();
850 DF->Size = List.Size();
851 if (Cache.HeaderP->MaxDescFileSize < DF->Size)
852 Cache.HeaderP->MaxDescFileSize = DF->Size;
853 Cache.HeaderP->DescFileCount++;
854
855 return true;
856 }
857 /*}}}*/
858 // CacheGenerator::NewDescription - Create a new Description /*{{{*/
859 // ---------------------------------------------------------------------
860 /* This puts a description structure in the linked list */
/* Allocates a Description record, points Desc at it and fills in the
   language code and md5 checksum.  If idxmd5str is non-zero it is used as
   the offset of an already-written md5 string, avoiding a duplicate copy.
   Returns the offset of the new record, or 0 on failure.
   NOTE(review): WriteUniqString/WriteStringInMap can grow and remap the
   cache; Desc appears to rely on the caller's Dynamic<> guard to survive
   that — confirm before reordering anything in here. */
map_ptrloc pkgCacheGenerator::NewDescription(pkgCache::DescIterator &Desc,
					    const string &Lang,
					    const MD5SumValue &md5sum,
					    map_ptrloc idxmd5str)
{
   // Get a structure
   map_ptrloc const Description = AllocateInMap(sizeof(pkgCache::Description));
   if (Description == 0)
      return 0;

   // Fill it in
   Desc = pkgCache::DescIterator(Cache,Cache.DescP + Description);
   Desc->ID = Cache.HeaderP->DescriptionCount++;
   // Language codes repeat constantly, so they are stored uniqued
   map_ptrloc const idxlanguage_code = WriteUniqString(Lang);
   if (unlikely(idxlanguage_code == 0))
      return 0;
   Desc->language_code = idxlanguage_code;

   // Reuse the caller-provided md5 string offset if we have one
   if (idxmd5str != 0)
      Desc->md5sum = idxmd5str;
   else
   {
      map_ptrloc const idxmd5sum = WriteStringInMap(md5sum.Value());
      if (unlikely(idxmd5sum == 0))
	 return 0;
      Desc->md5sum = idxmd5sum;
   }

   return Description;
}
891 /*}}}*/
892 // CacheGenerator::NewDepends - Create a dependency element /*{{{*/
893 // ---------------------------------------------------------------------
894 /* This creates a dependency element in the tree. It is linked to the
895 version and to the package that it is pointing to. */
/* Resolves the version string to an offset in the cache and hands off to
   the offset-based overload below.  An empty Version yields offset 0
   (an unversioned dependency). */
bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
				   pkgCache::VerIterator &Ver,
				   string const &Version,
				   unsigned int const &Op,
				   unsigned int const &Type,
				   map_ptrloc* &OldDepLast)
{
   map_ptrloc index = 0;
   if (Version.empty() == false)
   {
      int const CmpOp = Op & 0x0F;
      // =-deps are used (79:1) for lockstep on same-source packages (e.g. data-packages)
      if (CmpOp == pkgCache::Dep::Equals && strcmp(Version.c_str(), Ver.VerStr()) == 0)
	 index = Ver->VerStr;

      if (index == 0)
      {
	 // Writing the string may grow and therefore remap the cache, so
	 // rebase the caller's cached end-of-list pointer if it moved.
	 void const * const oldMap = Map.Data();
	 index = WriteStringInMap(Version);
	 if (unlikely(index == 0))
	    return false;
	 if (OldDepLast != 0 && oldMap != Map.Data())
	    OldDepLast += (map_ptrloc const * const) Map.Data() - (map_ptrloc const * const) oldMap;
      }
   }
   return NewDepends(Pkg, Ver, index, Op, Type, OldDepLast);
}
/* Allocates a Dependency record for Ver targeting Pkg (Version is the
   cache offset of the version string, 0 for unversioned) and links it
   into both the target package's reverse-depends list and the parent
   version's depends list.  OldDepLast caches the tail of the version's
   depends list so repeated calls append in O(1): it is computed when
   NULL and rebased when the allocation remapped the cache. */
bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
				   pkgCache::VerIterator &Ver,
				   map_ptrloc const Version,
				   unsigned int const &Op,
				   unsigned int const &Type,
				   map_ptrloc* &OldDepLast)
{
   void const * const oldMap = Map.Data();
   // Get a structure
   map_ptrloc const Dependency = AllocateInMap(sizeof(pkgCache::Dependency));
   if (unlikely(Dependency == 0))
      return false;

   // Fill it in
   pkgCache::DepIterator Dep(Cache,Cache.DepP + Dependency);
   Dynamic<pkgCache::DepIterator> DynDep(Dep);
   Dep->ParentVer = Ver.Index();
   Dep->Type = Type;
   Dep->CompareOp = Op;
   Dep->Version = Version;
   Dep->ID = Cache.HeaderP->DependsCount++;

   // Link it to the package (head insert into the reverse-depends list)
   Dep->Package = Pkg.Index();
   Dep->NextRevDepends = Pkg->RevDepends;
   Pkg->RevDepends = Dep.Index();

   // Do we know where to link the Dependency to?
   if (OldDepLast == NULL)
   {
      // No cached tail: walk the version's depends list to find it
      OldDepLast = &Ver->DependsList;
      for (pkgCache::DepIterator D = Ver.DependsList(); D.end() == false; ++D)
	 OldDepLast = &D->NextDepends;
   } else if (oldMap != Map.Data())
      // AllocateInMap remapped the cache: rebase the cached tail pointer
      OldDepLast += (map_ptrloc const * const) Map.Data() - (map_ptrloc const * const) oldMap;

   // Append and advance the cached tail for the next call
   Dep->NextDepends = *OldDepLast;
   *OldDepLast = Dep.Index();
   OldDepLast = &Dep->NextDepends;

   return true;
}
965 /*}}}*/
966 // ListParser::NewDepends - Create the environment for a new dependency /*{{{*/
967 // ---------------------------------------------------------------------
968 /* This creates a Group and the Package to link this dependency to if
969 needed and handles also the caching of the old endpoint */
970 bool pkgCacheGenerator::ListParser::NewDepends(pkgCache::VerIterator &Ver,
971 const string &PackageName,
972 const string &Arch,
973 const string &Version,
974 unsigned int Op,
975 unsigned int Type)
976 {
977 pkgCache::GrpIterator Grp;
978 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
979 if (unlikely(Owner->NewGroup(Grp, PackageName) == false))
980 return false;
981
982 // Locate the target package
983 pkgCache::PkgIterator Pkg = Grp.FindPkg(Arch);
984 // we don't create 'none' packages and their dependencies if we can avoid it …
985 if (Pkg.end() == true && Arch == "none" && strcmp(Ver.ParentPkg().Arch(), "none") != 0)
986 return true;
987 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
988 if (Pkg.end() == true) {
989 if (unlikely(Owner->NewPackage(Pkg, PackageName, Arch) == false))
990 return false;
991 }
992
993 // Is it a file dependency?
994 if (unlikely(PackageName[0] == '/'))
995 FoundFileDeps = true;
996
997 /* Caching the old end point speeds up generation substantially */
998 if (OldDepVer != Ver) {
999 OldDepLast = NULL;
1000 OldDepVer = Ver;
1001 }
1002
1003 return Owner->NewDepends(Pkg, Ver, Version, Op, Type, OldDepLast);
1004 }
1005 /*}}}*/
1006 // ListParser::NewProvides - Create a Provides element /*{{{*/
1007 // ---------------------------------------------------------------------
1008 /* */
/* Creates a Provides record linking Ver (the provider) with the package
   named PkgName:PkgArch (created on demand), carrying an optional
   provided version string. */
bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator &Ver,
						const string &PkgName,
						const string &PkgArch,
						const string &Version)
{
   pkgCache &Cache = Owner->Cache;

   // We do not add self referencing provides: same name and either the
   // same architecture, or arch:all while the parent is the native arch
   if (Ver.ParentPkg().Name() == PkgName && (PkgArch == Ver.ParentPkg().Arch() ||
	(PkgArch == "all" && strcmp((Cache.StrP + Cache.HeaderP->Architecture), Ver.ParentPkg().Arch()) == 0)))
      return true;

   // Get a structure
   map_ptrloc const Provides = Owner->AllocateInMap(sizeof(pkgCache::Provides));
   if (unlikely(Provides == 0))
      return false;
   Cache.HeaderP->ProvidesCount++;

   // Fill it in (DynPrv keeps Prv valid across remapping writes below)
   pkgCache::PrvIterator Prv(Cache,Cache.ProvideP + Provides,Cache.PkgP);
   Dynamic<pkgCache::PrvIterator> DynPrv(Prv);
   Prv->Version = Ver.Index();
   Prv->NextPkgProv = Ver->ProvidesList;
   Ver->ProvidesList = Prv.Index();
   if (Version.empty() == false) {
      // NOTE(review): the field is assigned before the zero-check; we bail
      // out right after on failure, so keep this order when editing.
      map_ptrloc const idxProvideVersion = WriteString(Version);
      Prv->ProvideVersion = idxProvideVersion;
      if (unlikely(idxProvideVersion == 0))
	 return false;
   }

   // Locate the target package (creating group/package as needed)
   pkgCache::PkgIterator Pkg;
   Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
   if (unlikely(Owner->NewPackage(Pkg,PkgName, PkgArch) == false))
      return false;

   // Link it to the package (head insert into its provides list)
   Prv->ParentPkg = Pkg.Index();
   Prv->NextProvides = Pkg->ProvidesList;
   Pkg->ProvidesList = Prv.Index();

   return true;
}
1053 /*}}}*/
1054 // CacheGenerator::SelectFile - Select the current file being parsed /*{{{*/
1055 // ---------------------------------------------------------------------
1056 /* This is used to select which file is to be associated with all newly
1057 added versions. The caller is responsible for setting the IMS fields. */
1058 bool pkgCacheGenerator::SelectFile(const string &File,const string &Site,
1059 const pkgIndexFile &Index,
1060 unsigned long Flags)
1061 {
1062 // Get some space for the structure
1063 map_ptrloc const idxFile = AllocateInMap(sizeof(*CurrentFile));
1064 if (unlikely(idxFile == 0))
1065 return false;
1066 CurrentFile = Cache.PkgFileP + idxFile;
1067
1068 // Fill it in
1069 map_ptrloc const idxFileName = WriteStringInMap(File);
1070 map_ptrloc const idxSite = WriteUniqString(Site);
1071 if (unlikely(idxFileName == 0 || idxSite == 0))
1072 return false;
1073 CurrentFile->FileName = idxFileName;
1074 CurrentFile->Site = idxSite;
1075 CurrentFile->NextFile = Cache.HeaderP->FileList;
1076 CurrentFile->Flags = Flags;
1077 CurrentFile->ID = Cache.HeaderP->PackageFileCount;
1078 map_ptrloc const idxIndexType = WriteUniqString(Index.GetType()->Label);
1079 if (unlikely(idxIndexType == 0))
1080 return false;
1081 CurrentFile->IndexType = idxIndexType;
1082 PkgFileName = File;
1083 Cache.HeaderP->FileList = CurrentFile - Cache.PkgFileP;
1084 Cache.HeaderP->PackageFileCount++;
1085
1086 if (Progress != 0)
1087 Progress->SubProgress(Index.Size());
1088 return true;
1089 }
1090 /*}}}*/
1091 // CacheGenerator::WriteUniqueString - Insert a unique string /*{{{*/
1092 // ---------------------------------------------------------------------
1093 /* This is used to create handles to strings. Given the same text it
1094 always returns the same number */
/* Interns a string: returns the cache offset of an existing identical
   string, or writes it and links a StringItem into the sorted list.
   Given the same text it always returns the same offset. */
unsigned long pkgCacheGenerator::WriteUniqString(const char *S,
						 unsigned int Size)
{
   /* We use a very small transient hash table here, this speeds up generation
      by a fair amount on slower machines */
   pkgCache::StringItem *&Bucket = UniqHash[(S[0]*5 + S[1]) % _count(UniqHash)];
   if (Bucket != 0 &&
       stringcmp(S,S+Size,Cache.StrP + Bucket->String) == 0)
      return Bucket->String;

   // Search for an insertion point in the sorted singly-linked list.
   // A NextItem offset of 0 (i.e. I == StringItemP) terminates the list.
   pkgCache::StringItem *I = Cache.StringItemP + Cache.HeaderP->StringList;
   int Res = 1;
   map_ptrloc *Last = &Cache.HeaderP->StringList;
   for (; I != Cache.StringItemP; Last = &I->NextItem,
        I = Cache.StringItemP + I->NextItem)
   {
      Res = stringcmp(S,S+Size,Cache.StrP + I->String);
      if (Res >= 0)
	 break;
   }

   // Match
   if (Res == 0)
   {
      Bucket = I;
      return I->String;
   }

   // Get a structure
   void const * const oldMap = Map.Data();
   map_ptrloc const Item = AllocateInMap(sizeof(pkgCache::StringItem));
   if (Item == 0)
      return 0;

   map_ptrloc const idxString = WriteStringInMap(S,Size);
   if (unlikely(idxString == 0))
      return 0;
   // The two writes above may have remapped the cache: rebase the raw
   // pointers into it before dereferencing them again.
   if (oldMap != Map.Data()) {
      Last += (map_ptrloc const * const) Map.Data() - (map_ptrloc const * const) oldMap;
      I += (pkgCache::StringItem const * const) Map.Data() - (pkgCache::StringItem const * const) oldMap;
   }
   *Last = Item;

   // Fill in the structure, splicing it in before I
   pkgCache::StringItem *ItemP = Cache.StringItemP + Item;
   ItemP->NextItem = I - Cache.StringItemP;
   ItemP->String = idxString;

   // Remember the fresh item in the transient hash table
   Bucket = ItemP;
   return ItemP->String;
}
1147 /*}}}*/
1148 // CheckValidity - Check that a cache is up-to-date /*{{{*/
1149 // ---------------------------------------------------------------------
1150 /* This just verifies that each file in the list of index files exists,
1151 has matching attributes with the cache and the cache does not have
1152 any extra files. */
/* Returns true only if CacheFile exists, is newer than the sources.list,
   and matches the given index files in BOTH directions: every index with
   packages must be found (IMS-valid) in the cache, and every package file
   recorded in the cache must have been matched by some index.  On success
   with OutMap set, ownership of the mapping passes to the caller. */
static bool CheckValidity(const string &CacheFile,
                          pkgSourceList &List,
                          FileIterator Start,
                          FileIterator End,
                          MMap **OutMap = 0)
{
   bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
   // No file, certainly invalid
   if (CacheFile.empty() == true || FileExists(CacheFile) == false)
   {
      if (Debug == true)
	 std::clog << "CacheFile doesn't exist" << std::endl;
      return false;
   }

   // A sources.list edited after the cache was written invalidates it
   if (List.GetLastModifiedTime() > GetModificationTime(CacheFile))
   {
      if (Debug == true)
	 std::clog << "sources.list is newer than the cache" << std::endl;
      return false;
   }

   // Map it (SPtr releases the map again on every early return)
   FileFd CacheF(CacheFile,FileFd::ReadOnly);
   SPtr<MMap> Map = new MMap(CacheF,0);
   pkgCache Cache(Map);
   if (_error->PendingError() == true || Map->Size() == 0)
   {
      if (Debug == true)
	 std::clog << "Errors are pending or Map is empty()" << std::endl;
      _error->Discard();
      return false;
   }

   /* Now we check every index file, see if it is in the cache,
      verify the IMS data and check that it is on the disk too.. */
   SPtrArray<bool> Visited = new bool[Cache.HeaderP->PackageFileCount];
   memset(Visited,0,sizeof(*Visited)*Cache.HeaderP->PackageFileCount);
   for (; Start != End; ++Start)
   {
      if (Debug == true)
	 std::clog << "Checking PkgFile " << (*Start)->Describe() << ": ";
      // Indexes without packages are not represented in the cache at all
      if ((*Start)->HasPackages() == false)
      {
	 if (Debug == true)
	    std::clog << "Has NO packages" << std::endl;
	 continue;
      }

      if ((*Start)->Exists() == false)
      {
#if 0 // mvo: we no longer give a message here (Default Sources spec)
	 _error->WarningE("stat",_("Couldn't stat source package list %s"),
			  (*Start)->Describe().c_str());
#endif
	 if (Debug == true)
	    std::clog << "file doesn't exist" << std::endl;
	 continue;
      }

      // FindInCache is also expected to do an IMS check.
      pkgCache::PkgFileIterator File = (*Start)->FindInCache(Cache);
      if (File.end() == true)
      {
	 if (Debug == true)
	    std::clog << "FindInCache returned end-Pointer" << std::endl;
	 return false;
      }

      // Mark the cache entry as accounted for by this index
      Visited[File->ID] = true;
      if (Debug == true)
	 std::clog << "with ID " << File->ID << " is valid" << std::endl;
   }

   // Any unvisited cache entry means the cache has stale extra files
   for (unsigned I = 0; I != Cache.HeaderP->PackageFileCount; I++)
      if (Visited[I] == false)
      {
	 if (Debug == true)
	    std::clog << "File with ID" << I << " wasn't visited" << std::endl;
	 return false;
      }

   if (_error->PendingError() == true)
   {
      if (Debug == true)
      {
	 std::clog << "Validity failed because of pending errors:" << std::endl;
	 _error->DumpErrors();
      }
      _error->Discard();
      return false;
   }

   // Success: hand the still-open mapping to the caller if requested
   if (OutMap != 0)
      *OutMap = Map.UnGuard();
   return true;
}
1250 /*}}}*/
1251 // ComputeSize - Compute the total size of a bunch of files /*{{{*/
1252 // ---------------------------------------------------------------------
1253 /* Size is kind of an abstract notion that is only used for the progress
1254 meter */
1255 static unsigned long ComputeSize(FileIterator Start,FileIterator End)
1256 {
1257 unsigned long TotalSize = 0;
1258 for (; Start < End; ++Start)
1259 {
1260 if ((*Start)->HasPackages() == false)
1261 continue;
1262 TotalSize += (*Start)->Size();
1263 }
1264 return TotalSize;
1265 }
1266 /*}}}*/
1267 // BuildCache - Merge the list of index files into the cache /*{{{*/
1268 // ---------------------------------------------------------------------
1269 /* */
1270 static bool BuildCache(pkgCacheGenerator &Gen,
1271 OpProgress *Progress,
1272 unsigned long &CurrentSize,unsigned long TotalSize,
1273 FileIterator Start, FileIterator End)
1274 {
1275 FileIterator I;
1276 for (I = Start; I != End; ++I)
1277 {
1278 if ((*I)->HasPackages() == false)
1279 continue;
1280
1281 if ((*I)->Exists() == false)
1282 continue;
1283
1284 if ((*I)->FindInCache(Gen.GetCache()).end() == false)
1285 {
1286 _error->Warning("Duplicate sources.list entry %s",
1287 (*I)->Describe().c_str());
1288 continue;
1289 }
1290
1291 unsigned long Size = (*I)->Size();
1292 if (Progress != NULL)
1293 Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Reading package lists"));
1294 CurrentSize += Size;
1295
1296 if ((*I)->Merge(Gen,Progress) == false)
1297 return false;
1298 }
1299
1300 if (Gen.HasFileDeps() == true)
1301 {
1302 if (Progress != NULL)
1303 Progress->Done();
1304 TotalSize = ComputeSize(Start, End);
1305 CurrentSize = 0;
1306 for (I = Start; I != End; ++I)
1307 {
1308 unsigned long Size = (*I)->Size();
1309 if (Progress != NULL)
1310 Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Collecting File Provides"));
1311 CurrentSize += Size;
1312 if ((*I)->MergeFileProvides(Gen,Progress) == false)
1313 return false;
1314 }
1315 }
1316
1317 return true;
1318 }
1319 /*}}}*/
1320 // CacheGenerator::CreateDynamicMMap - load an mmap with configuration options /*{{{*/
1321 DynamicMMap* pkgCacheGenerator::CreateDynamicMMap(FileFd *CacheF, unsigned long Flags) {
1322 unsigned long const MapStart = _config->FindI("APT::Cache-Start", 24*1024*1024);
1323 unsigned long const MapGrow = _config->FindI("APT::Cache-Grow", 1*1024*1024);
1324 unsigned long const MapLimit = _config->FindI("APT::Cache-Limit", 0);
1325 Flags |= MMap::Moveable;
1326 if (_config->FindB("APT::Cache-Fallback", false) == true)
1327 Flags |= MMap::Fallback;
1328 if (CacheF != NULL)
1329 return new DynamicMMap(*CacheF, Flags, MapStart, MapGrow, MapLimit);
1330 else
1331 return new DynamicMMap(Flags, MapStart, MapGrow, MapLimit);
1332 }
1333 /*}}}*/
1334 // CacheGenerator::MakeStatusCache - Construct the status cache /*{{{*/
1335 // ---------------------------------------------------------------------
1336 /* This makes sure that the status cache (the cache that has all
1337 index files from the sources list and all local ones) is ready
1338 to be mmaped. If OutMap is not zero then a MMap object representing
1339 the cache will be stored there. This is pretty much mandetory if you
1340 are using AllowMem. AllowMem lets the function be run as non-root
1341 where it builds the cache 'fast' into a memory buffer. */
// Deprecated free-function shim: forwards to the static method variant.
APT_DEPRECATED bool pkgMakeStatusCache(pkgSourceList &List,OpProgress &Progress,
			MMap **OutMap, bool AllowMem)
   { return pkgCacheGenerator::MakeStatusCache(List, &Progress, OutMap, AllowMem); }
/* Ensures the full status cache (all sources.list indexes plus the
   system status files) is ready to be mmaped.  If the on-disk pkgcache
   is still valid it is used directly; otherwise the cache is rebuilt,
   seeding from a valid srcpkgcache when possible.  With AllowMem the
   build can happen purely in memory when the cache files are not
   writable.  On success and with OutMap set, *OutMap receives a MMap of
   the finished cache owned by the caller. */
bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress,
			MMap **OutMap,bool AllowMem)
{
   bool const Debug = _config->FindB("Debug::pkgCacheGen", false);

   // Flatten all index files of every sources.list entry into one vector
   std::vector<pkgIndexFile *> Files;
   for (std::vector<metaIndex *>::const_iterator i = List.begin();
        i != List.end();
        ++i)
   {
      std::vector <pkgIndexFile *> *Indexes = (*i)->GetIndexFiles();
      for (std::vector<pkgIndexFile *>::const_iterator j = Indexes->begin();
	   j != Indexes->end();
	   ++j)
         Files.push_back (*j);
   }

   // Files[0..EndOfSource) are source indexes; status files come after
   unsigned long const EndOfSource = Files.size();
   if (_system->AddStatusFiles(Files) == false)
      return false;

   // Decide if we can write to the files..
   string const CacheFile = _config->FindFile("Dir::Cache::pkgcache");
   string const SrcCacheFile = _config->FindFile("Dir::Cache::srcpkgcache");

   // ensure the cache directory exists
   if (CacheFile.empty() == false || SrcCacheFile.empty() == false)
   {
      string dir = _config->FindDir("Dir::Cache");
      size_t const len = dir.size();
      // strip a trailing "apt/" component so the parent gets created too
      if (len > 5 && dir.find("/apt/", len - 6, 5) == len - 5)
	 dir = dir.substr(0, len - 5);
      if (CacheFile.empty() == false)
	 CreateDirectory(dir, flNotFile(CacheFile));
      if (SrcCacheFile.empty() == false)
	 CreateDirectory(dir, flNotFile(SrcCacheFile));
   }

   // Decide if we can write to the cache
   bool Writeable = false;
   if (CacheFile.empty() == false)
      Writeable = access(flNotFile(CacheFile).c_str(),W_OK) == 0;
   else
      if (SrcCacheFile.empty() == false)
	 Writeable = access(flNotFile(SrcCacheFile).c_str(),W_OK) == 0;
   if (Debug == true)
      std::clog << "Do we have write-access to the cache files? " << (Writeable ? "YES" : "NO") << std::endl;

   if (Writeable == false && AllowMem == false && CacheFile.empty() == false)
      return _error->Error(_("Unable to write to %s"),flNotFile(CacheFile).c_str());

   if (Progress != NULL)
      Progress->OverallProgress(0,1,1,_("Reading package lists"));

   // Cache is OK, Fin.
   if (CheckValidity(CacheFile, List, Files.begin(),Files.end(),OutMap) == true)
   {
      if (Progress != NULL)
	 Progress->OverallProgress(1,1,1,_("Reading package lists"));
      if (Debug == true)
	 std::clog << "pkgcache.bin is valid - no need to build anything" << std::endl;
      return true;
   }
   else if (Debug == true)
      std::clog << "pkgcache.bin is NOT valid" << std::endl;

   /* At this point we know we need to reconstruct the package cache,
      begin. */
   SPtr<FileFd> CacheF;
   SPtr<DynamicMMap> Map;
   if (Writeable == true && CacheFile.empty() == false)
   {
      // Try a file-backed map first; errors are collected on a private
      // stack so a failure can fall back to a memory map (if allowed)
      _error->PushToStack();
      unlink(CacheFile.c_str());
      CacheF = new FileFd(CacheFile,FileFd::WriteAtomic);
      fchmod(CacheF->Fd(),0644);
      Map = CreateDynamicMMap(CacheF, MMap::Public);
      if (_error->PendingError() == true)
      {
	 delete CacheF.UnGuard();
	 delete Map.UnGuard();
	 if (Debug == true)
	    std::clog << "Open filebased MMap FAILED" << std::endl;
	 Writeable = false;
	 if (AllowMem == false)
	 {
	    _error->MergeWithStack();
	    return false;
	 }
	 // discard the errors; the memory-map fallback below takes over
	 _error->RevertToStack();
      }
      else
      {
	 _error->MergeWithStack();
	 if (Debug == true)
	    std::clog << "Open filebased MMap" << std::endl;
      }
   }
   if (Writeable == false || CacheFile.empty() == true)
   {
      // Just build it in memory..
      Map = CreateDynamicMMap(NULL);
      if (Debug == true)
	 std::clog << "Open memory Map (not filebased)" << std::endl;
   }

   // Lets try the source cache.
   unsigned long CurrentSize = 0;
   unsigned long TotalSize = 0;
   if (CheckValidity(SrcCacheFile, List, Files.begin(),
		     Files.begin()+EndOfSource) == true)
   {
      if (Debug == true)
	 std::clog << "srcpkgcache.bin is valid - populate MMap with it." << std::endl;
      // Preload the map with the source cache
      FileFd SCacheF(SrcCacheFile,FileFd::ReadOnly);
      unsigned long const alloc = Map->RawAllocate(SCacheF.Size());
      if ((alloc == 0 && _error->PendingError())
	  || SCacheF.Read((unsigned char *)Map->Data() + alloc,
			  SCacheF.Size()) == false)
	 return false;

      TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());

      // Build the status cache on top of the preloaded source part
      pkgCacheGenerator Gen(Map.Get(),Progress);
      if (_error->PendingError() == true)
	 return false;
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
		     Files.begin()+EndOfSource,Files.end()) == false)
	 return false;
   }
   else
   {
      if (Debug == true)
	 std::clog << "srcpkgcache.bin is NOT valid - rebuild" << std::endl;
      TotalSize = ComputeSize(Files.begin(),Files.end());

      // Build the source cache
      pkgCacheGenerator Gen(Map.Get(),Progress);
      if (_error->PendingError() == true)
	 return false;
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
		     Files.begin(),Files.begin()+EndOfSource) == false)
	 return false;

      // Write it back
      if (Writeable == true && SrcCacheFile.empty() == false)
      {
	 FileFd SCacheF(SrcCacheFile,FileFd::WriteAtomic);
	 if (_error->PendingError() == true)
	    return false;

	 fchmod(SCacheF.Fd(),0644);

	 // Write out the main data
	 if (SCacheF.Write(Map->Data(),Map->Size()) == false)
	    return _error->Error(_("IO Error saving source cache"));
	 SCacheF.Sync();

	 // Write out the proper header: rewrite it with Dirty unset so
	 // the file is accepted on the next load, then mark the
	 // in-memory copy dirty again for the status merge below
	 Gen.GetCache().HeaderP->Dirty = false;
	 if (SCacheF.Seek(0) == false ||
	     SCacheF.Write(Map->Data(),sizeof(*Gen.GetCache().HeaderP)) == false)
	    return _error->Error(_("IO Error saving source cache"));
	 Gen.GetCache().HeaderP->Dirty = true;
	 SCacheF.Sync();
      }

      // Build the status cache
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
		     Files.begin()+EndOfSource,Files.end()) == false)
	 return false;
   }
   if (Debug == true)
      std::clog << "Caches are ready for shipping" << std::endl;

   if (_error->PendingError() == true)
      return false;
   if (OutMap != 0)
   {
      if (CacheF != 0)
      {
	 // file-backed build: hand out a fresh mapping of the file
	 delete Map.UnGuard();
	 *OutMap = new MMap(*CacheF,0);
      }
      else
      {
	 *OutMap = Map.UnGuard();
      }
   }

   return true;
}
1539 /*}}}*/
1540 // CacheGenerator::MakeOnlyStatusCache - Build only a status files cache/*{{{*/
1541 // ---------------------------------------------------------------------
1542 /* */
// Deprecated free-function shim: forwards to the static method variant.
APT_DEPRECATED bool pkgMakeOnlyStatusCache(OpProgress &Progress,DynamicMMap **OutMap)
   { return pkgCacheGenerator::MakeOnlyStatusCache(&Progress, OutMap); }
1545 bool pkgCacheGenerator::MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **OutMap)
1546 {
1547 std::vector<pkgIndexFile *> Files;
1548 unsigned long EndOfSource = Files.size();
1549 if (_system->AddStatusFiles(Files) == false)
1550 return false;
1551
1552 SPtr<DynamicMMap> Map = CreateDynamicMMap(NULL);
1553 unsigned long CurrentSize = 0;
1554 unsigned long TotalSize = 0;
1555
1556 TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());
1557
1558 // Build the status cache
1559 if (Progress != NULL)
1560 Progress->OverallProgress(0,1,1,_("Reading package lists"));
1561 pkgCacheGenerator Gen(Map.Get(),Progress);
1562 if (_error->PendingError() == true)
1563 return false;
1564 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1565 Files.begin()+EndOfSource,Files.end()) == false)
1566 return false;
1567
1568 if (_error->PendingError() == true)
1569 return false;
1570 *OutMap = Map.UnGuard();
1571
1572 return true;
1573 }
1574 /*}}}*/
1575 // IsDuplicateDescription /*{{{*/
1576 static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
1577 MD5SumValue const &CurMd5, std::string const &CurLang)
1578 {
1579 // Descriptions in the same link-list have all the same md5
1580 if (Desc.end() == true || MD5SumValue(Desc.md5()) != CurMd5)
1581 return false;
1582 for (; Desc.end() == false; ++Desc)
1583 if (Desc.LanguageCode() == CurLang)
1584 return true;
1585 return false;
1586 }
1587 /*}}}*/
1588 // CacheGenerator::FinishCache /*{{{*/
// Intentionally a no-op; presumably retained so existing callers keep
// working — confirm against the public header before removing.
bool pkgCacheGenerator::FinishCache(OpProgress * /*Progress*/)
{
   return true;
}
1593 /*}}}*/