1 // -*- mode: cpp; mode: fold -*-
2 // Description /*{{{*/
3 // $Id: pkgcachegen.cc,v 1.53.2.1 2003/12/24 23:09:17 mdz Exp $
4 /* ######################################################################
5
6 Package Cache Generator - Generator for the cache structure.
7
8 This builds the cache structure from the abstract package list parser.
9
10 ##################################################################### */
11 /*}}}*/
12 // Include Files /*{{{*/
13 #include <config.h>
14
15 #include <apt-pkg/pkgcachegen.h>
16 #include <apt-pkg/error.h>
17 #include <apt-pkg/version.h>
18 #include <apt-pkg/progress.h>
19 #include <apt-pkg/sourcelist.h>
20 #include <apt-pkg/configuration.h>
21 #include <apt-pkg/aptconfiguration.h>
22 #include <apt-pkg/strutl.h>
23 #include <apt-pkg/sptr.h>
24 #include <apt-pkg/pkgsystem.h>
25 #include <apt-pkg/macros.h>
26 #include <apt-pkg/tagfile.h>
27 #include <apt-pkg/metaindex.h>
28 #include <apt-pkg/fileutl.h>
29
30 #include <vector>
31 #include <sys/stat.h>
32 #include <unistd.h>
33 #include <errno.h>
34 #include <stdio.h>
35
36 #include <apti18n.h>
37 /*}}}*/
38 typedef std::vector<pkgIndexFile *>::iterator FileIterator;
39 template <typename Iter> std::vector<Iter*> pkgCacheGenerator::Dynamic<Iter>::toReMap;
40
41 static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
42 MD5SumValue const &CurMd5, std::string const &CurLang);
43
44 using std::string;
45
46 // CacheGenerator::pkgCacheGenerator - Constructor /*{{{*/
47 // ---------------------------------------------------------------------
48 /* We set the dirty flag and make sure that it is written to the disk */
49 pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) :
50 Map(*pMap), Cache(pMap,false), Progress(Prog),
51 FoundFileDeps(0)
52 {
53 CurrentFile = 0;
54 memset(UniqHash,0,sizeof(UniqHash));
55
56 if (_error->PendingError() == true)
57 return;
58
59 if (Map.Size() == 0)
60 {
61 // Set up the map interface..
62 Cache.HeaderP = (pkgCache::Header *)Map.Data();
63 if (Map.RawAllocate(sizeof(pkgCache::Header)) == 0 && _error->PendingError() == true)
64 return;
65
66 Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
67
68 // Starting header
69 *Cache.HeaderP = pkgCache::Header();
70 map_ptrloc const idxVerSysName = WriteStringInMap(_system->VS->Label);
71 Cache.HeaderP->VerSysName = idxVerSysName;
72 map_ptrloc const idxArchitecture = WriteStringInMap(_config->Find("APT::Architecture"));
73 Cache.HeaderP->Architecture = idxArchitecture;
74 if (unlikely(idxVerSysName == 0 || idxArchitecture == 0))
75 return;
76 Cache.ReMap();
77 }
78 else
79 {
80 // Map directly from the existing file
81 Cache.ReMap();
82 Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
83 if (Cache.VS != _system->VS)
84 {
85 _error->Error(_("Cache has an incompatible versioning system"));
86 return;
87 }
88 }
89
90 Cache.HeaderP->Dirty = true;
91 Map.Sync(0,sizeof(pkgCache::Header));
92 }
93 /*}}}*/
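// Usage sketch (illustrative only, not part of the original file): the
// generator is normally driven through MakeStatusCache() below, but a
// minimal by-hand setup - assuming an in-memory map is acceptable and no
// progress reporting is wanted - would look roughly like this:
//
//    DynamicMMap Map(MMap::Moveable, 24*1024*1024, 1024*1024, 0);
//    pkgCacheGenerator Gen(&Map, NULL);        // NULL: no progress meter
//    if (_error->PendingError() == true)
//       return false;
//    // then feed it index files via SelectFile() and MergeList(), as
//    // pkgIndexFile::Merge() does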
94 // CacheGenerator::~pkgCacheGenerator - Destructor /*{{{*/
95 // ---------------------------------------------------------------------
96 /* We sync the data then unset the dirty flag in two steps so as to
97 avoid a problem during a crash */
98 pkgCacheGenerator::~pkgCacheGenerator()
99 {
100 if (_error->PendingError() == true)
101 return;
102 if (Map.Sync() == false)
103 return;
104
105 Cache.HeaderP->Dirty = false;
106 Cache.HeaderP->CacheFileSize = Map.Size();
107 Map.Sync(0,sizeof(pkgCache::Header));
108 }
109 /*}}}*/
110 void pkgCacheGenerator::ReMap(void const * const oldMap, void const * const newMap) {/*{{{*/
111 if (oldMap == newMap)
112 return;
113
114 if (_config->FindB("Debug::pkgCacheGen", false))
115 std::clog << "Remapping from " << oldMap << " to " << newMap << std::endl;
116
117 Cache.ReMap(false);
118
119 CurrentFile += (pkgCache::PackageFile*) newMap - (pkgCache::PackageFile*) oldMap;
120
121 for (size_t i = 0; i < _count(UniqHash); ++i)
122 if (UniqHash[i] != 0)
123 UniqHash[i] += (pkgCache::StringItem*) newMap - (pkgCache::StringItem*) oldMap;
124
125 for (std::vector<pkgCache::GrpIterator*>::const_iterator i = Dynamic<pkgCache::GrpIterator>::toReMap.begin();
126 i != Dynamic<pkgCache::GrpIterator>::toReMap.end(); ++i)
127 (*i)->ReMap(oldMap, newMap);
128 for (std::vector<pkgCache::PkgIterator*>::const_iterator i = Dynamic<pkgCache::PkgIterator>::toReMap.begin();
129 i != Dynamic<pkgCache::PkgIterator>::toReMap.end(); ++i)
130 (*i)->ReMap(oldMap, newMap);
131 for (std::vector<pkgCache::VerIterator*>::const_iterator i = Dynamic<pkgCache::VerIterator>::toReMap.begin();
132 i != Dynamic<pkgCache::VerIterator>::toReMap.end(); ++i)
133 (*i)->ReMap(oldMap, newMap);
134 for (std::vector<pkgCache::DepIterator*>::const_iterator i = Dynamic<pkgCache::DepIterator>::toReMap.begin();
135 i != Dynamic<pkgCache::DepIterator>::toReMap.end(); ++i)
136 (*i)->ReMap(oldMap, newMap);
137 for (std::vector<pkgCache::DescIterator*>::const_iterator i = Dynamic<pkgCache::DescIterator>::toReMap.begin();
138 i != Dynamic<pkgCache::DescIterator>::toReMap.end(); ++i)
139 (*i)->ReMap(oldMap, newMap);
140 for (std::vector<pkgCache::PrvIterator*>::const_iterator i = Dynamic<pkgCache::PrvIterator>::toReMap.begin();
141 i != Dynamic<pkgCache::PrvIterator>::toReMap.end(); ++i)
142 (*i)->ReMap(oldMap, newMap);
143 for (std::vector<pkgCache::PkgFileIterator*>::const_iterator i = Dynamic<pkgCache::PkgFileIterator>::toReMap.begin();
144 i != Dynamic<pkgCache::PkgFileIterator>::toReMap.end(); ++i)
145 (*i)->ReMap(oldMap, newMap);
146 } /*}}}*/
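// Remapping sketch: every raw pointer the generator keeps (CurrentFile,
// the UniqHash entries) points into the old mapping, so shifting it by the
// typed difference (newMap - oldMap) lands it at the same offset in the
// new mapping; registered iterators are fixed up via the Dynamic<> lists
// walked above. Illustration with made-up numbers (not from this file):
//
//    oldMap = 0x1000, newMap = 0x9000, CurrentFile = oldMap + 7 slots
//    after ReMap(): CurrentFile = newMap + 7 slots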
147 // CacheGenerator::WriteStringInMap /*{{{*/
148 map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String,
149 const unsigned long &Len) {
150 void const * const oldMap = Map.Data();
151 map_ptrloc const index = Map.WriteString(String, Len);
152 if (index != 0)
153 ReMap(oldMap, Map.Data());
154 return index;
155 }
156 /*}}}*/
157 // CacheGenerator::WriteStringInMap /*{{{*/
158 map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String) {
159 void const * const oldMap = Map.Data();
160 map_ptrloc const index = Map.WriteString(String);
161 if (index != 0)
162 ReMap(oldMap, Map.Data());
163 return index;
164 }
165 /*}}}*/
166 map_ptrloc pkgCacheGenerator::AllocateInMap(const unsigned long &size) {/*{{{*/
167 void const * const oldMap = Map.Data();
168 map_ptrloc const index = Map.Allocate(size);
169 if (index != 0)
170 ReMap(oldMap, Map.Data());
171 return index;
172 }
173 /*}}}*/
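// All three helpers above follow the same pattern: a write into the
// DynamicMMap may grow and therefore move the mapping, so the old base
// address is captured first and ReMap() re-bases everything afterwards.
// Callers holding raw map_ptrloc* pointers across such a call must do the
// same fix-up themselves, e.g. (sketch of the pattern used below; SomePtr
// and SomeString are placeholders):
//
//    void const * const oldMap = Map.Data();
//    map_ptrloc const idx = WriteStringInMap(SomeString);  // may remap
//    if (oldMap != Map.Data())
//       SomePtr += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;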
174 // CacheGenerator::MergeList - Merge the package list /*{{{*/
175 // ---------------------------------------------------------------------
176 /* This drives the generation of entries in the cache. Each loop
177 iteration processes a single package record from the underlying parse engine. */
178 bool pkgCacheGenerator::MergeList(ListParser &List,
179 pkgCache::VerIterator *OutVer)
180 {
181 List.Owner = this;
182
183 unsigned int Counter = 0;
184 while (List.Step() == true)
185 {
186 string const PackageName = List.Package();
187 if (PackageName.empty() == true)
188 return false;
189
190 Counter++;
191 if (Counter % 100 == 0 && Progress != 0)
192 Progress->Progress(List.Offset());
193
194 string Arch = List.Architecture();
195 string const Version = List.Version();
196 if (Version.empty() == true && Arch.empty() == true)
197 {
198 // package descriptions
199 if (MergeListGroup(List, PackageName) == false)
200 return false;
201 continue;
202 }
203
204 if (Arch.empty() == true)
205 {
206 // use the pseudo arch 'none' for arch-less packages
207 Arch = "none";
208 /* We might be building a SingleArchCache here, which we don't want to blow
209 up into a proper MultiArchCache just for these :none packages, so ensure
210 that we always have a native package structure first for SingleArch */
211 pkgCache::PkgIterator NP;
212 Dynamic<pkgCache::PkgIterator> DynPkg(NP);
213 if (NewPackage(NP, PackageName, _config->Find("APT::Architecture")) == false)
214 // TRANSLATOR: The first placeholder is a package name,
215 // the other two should be copied verbatim as they include debug info
216 return _error->Error(_("Error occurred while processing %s (%s%d)"),
217 PackageName.c_str(), "NewPackage", 0);
218 }
219
220 // Get a pointer to the package structure
221 pkgCache::PkgIterator Pkg;
222 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
223 if (NewPackage(Pkg, PackageName, Arch) == false)
224 // TRANSLATOR: The first placeholder is a package name,
225 // the other two should be copied verbatim as they include debug info
226 return _error->Error(_("Error occurred while processing %s (%s%d)"),
227 PackageName.c_str(), "NewPackage", 1);
228
229
230 if (Version.empty() == true)
231 {
232 if (MergeListPackage(List, Pkg) == false)
233 return false;
234 }
235 else
236 {
237 if (MergeListVersion(List, Pkg, Version, OutVer) == false)
238 return false;
239 }
240
241 if (OutVer != 0)
242 {
243 FoundFileDeps |= List.HasFileDeps();
244 return true;
245 }
246 }
247
248 if (Cache.HeaderP->PackageCount >= (1ULL<<sizeof(Cache.PkgP->ID)*8)-1)
249 return _error->Error(_("Wow, you exceeded the number of package "
250 "names this APT is capable of."));
251 if (Cache.HeaderP->VersionCount >= (1ULL<<(sizeof(Cache.VerP->ID)*8))-1)
252 return _error->Error(_("Wow, you exceeded the number of versions "
253 "this APT is capable of."));
254 if (Cache.HeaderP->DescriptionCount >= (1ULL<<(sizeof(Cache.DescP->ID)*8))-1)
255 return _error->Error(_("Wow, you exceeded the number of descriptions "
256 "this APT is capable of."));
257 if (Cache.HeaderP->DependsCount >= (1ULL<<(sizeof(Cache.DepP->ID)*8))-1ULL)
258 return _error->Error(_("Wow, you exceeded the number of dependencies "
259 "this APT is capable of."));
260
261 FoundFileDeps |= List.HasFileDeps();
262 return true;
263 }
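// Dispatch summary (informal): a stanza with neither Version nor
// Architecture carries Translation-/description data (MergeListGroup); a
// stanza with an Architecture but no Version only updates package-level
// data and descriptions (MergeListPackage); everything else creates or
// reuses a version (MergeListVersion). Arch-less stanzas that do carry a
// Version are filed under the pseudo architecture 'none' above.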
264 // CacheGenerator::MergeListGroup /*{{{*/
265 bool pkgCacheGenerator::MergeListGroup(ListParser &List, std::string const &GrpName)
266 {
267 pkgCache::GrpIterator Grp = Cache.FindGrp(GrpName);
268 // a group has no data of its own, only packages have it, but stanzas
269 // like this come from Translation- files to add descriptions;
270 // without a version we don't need a description for it…
271 if (Grp.end() == true)
272 return true;
273 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
274
275 pkgCache::PkgIterator Pkg;
276 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
277 for (Pkg = Grp.PackageList(); Pkg.end() == false; Pkg = Grp.NextPkg(Pkg))
278 if (MergeListPackage(List, Pkg) == false)
279 return false;
280
281 return true;
282 }
283 /*}}}*/
284 // CacheGenerator::MergeListPackage /*{{{*/
285 bool pkgCacheGenerator::MergeListPackage(ListParser &List, pkgCache::PkgIterator &Pkg)
286 {
287 // we first process the package, then the descriptions
288 // (for deb this package processing is in fact a no-op)
289 pkgCache::VerIterator Ver(Cache);
290 Dynamic<pkgCache::VerIterator> DynVer(Ver);
291 if (List.UsePackage(Pkg, Ver) == false)
292 return _error->Error(_("Error occurred while processing %s (%s%d)"),
293 Pkg.Name(), "UsePackage", 1);
294
295 // Find the right version to write the description
296 MD5SumValue CurMd5 = List.Description_md5();
297 std::string CurLang = List.DescriptionLanguage();
298
299 for (Ver = Pkg.VersionList(); Ver.end() == false; ++Ver)
300 {
301 pkgCache::DescIterator Desc = Ver.DescriptionList();
302
303 // a version can only have one md5 describing it
304 if (Desc.end() == true || MD5SumValue(Desc.md5()) != CurMd5)
305 continue;
306
307 // don't add a new description if we have one for the given
308 // md5 && language
309 if (IsDuplicateDescription(Desc, CurMd5, CurLang) == true)
310 continue;
311
312 Dynamic<pkgCache::DescIterator> DynDesc(Desc);
313 // we add at the end, so that the start is constant as we need
314 // that to be able to efficiently share these lists
315 map_ptrloc *LastDesc = &Ver->DescriptionList;
316 for (;Desc.end() == false && Desc->NextDesc != 0; ++Desc);
317 if (Desc.end() == false)
318 LastDesc = &Desc->NextDesc;
319
320 void const * const oldMap = Map.Data();
321 map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, *LastDesc);
322 if (unlikely(descindex == 0 && _error->PendingError()))
323 return _error->Error(_("Error occurred while processing %s (%s%d)"),
324 Pkg.Name(), "NewDescription", 1);
325 if (oldMap != Map.Data())
326 LastDesc += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
327 *LastDesc = descindex;
328 Desc->ParentPkg = Pkg.Index();
329
330 if ((*LastDesc == 0 && _error->PendingError()) || NewFileDesc(Desc,List) == false)
331 return _error->Error(_("Error occurred while processing %s (%s%d)"),
332 Pkg.Name(), "NewFileDesc", 1);
333
334 // we can stop here as all "same" versions will share the description
335 break;
336 }
337
338 return true;
339 }
340 /*}}}*/
341 // CacheGenerator::MergeListVersion /*{{{*/
342 bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator &Pkg,
343 std::string const &Version, pkgCache::VerIterator* &OutVer)
344 {
345 pkgCache::VerIterator Ver = Pkg.VersionList();
346 Dynamic<pkgCache::VerIterator> DynVer(Ver);
347 map_ptrloc *LastVer = &Pkg->VersionList;
348 void const * oldMap = Map.Data();
349
350 unsigned long const Hash = List.VersionHash();
351 if (Ver.end() == false)
352 {
353 /* We know the list is sorted so we use that fact in the search.
354 Insertion of new versions is done with correct sorting */
355 int Res = 1;
356 for (; Ver.end() == false; LastVer = &Ver->NextVer, Ver++)
357 {
358 Res = Cache.VS->CmpVersion(Version,Ver.VerStr());
359 // Version is higher than the current version - insert here
360 if (Res > 0)
361 break;
362 // Version strings are equal - is the hash also equal?
363 if (Res == 0 && Ver->Hash == Hash)
364 break;
365 // proceed to the next entry until we have either found the right one
366 // or another version (which will be lower, as the list is sorted)
367 }
368
369 /* We already have a version for this item, record that we saw it */
370 if (Res == 0 && Ver.end() == false && Ver->Hash == Hash)
371 {
372 if (List.UsePackage(Pkg,Ver) == false)
373 return _error->Error(_("Error occurred while processing %s (%s%d)"),
374 Pkg.Name(), "UsePackage", 2);
375
376 if (NewFileVer(Ver,List) == false)
377 return _error->Error(_("Error occurred while processing %s (%s%d)"),
378 Pkg.Name(), "NewFileVer", 1);
379
380 // Read only a single record and return
381 if (OutVer != 0)
382 {
383 *OutVer = Ver;
384 return true;
385 }
386
387 return true;
388 }
389 }
390
391 // Add a new version
392 map_ptrloc const verindex = NewVersion(Ver,Version,*LastVer);
393 if (verindex == 0 && _error->PendingError())
394 return _error->Error(_("Error occurred while processing %s (%s%d)"),
395 Pkg.Name(), "NewVersion", 1);
396
397 if (oldMap != Map.Data())
398 LastVer += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
399 *LastVer = verindex;
400 Ver->ParentPkg = Pkg.Index();
401 Ver->Hash = Hash;
402
403 if (unlikely(List.NewVersion(Ver) == false))
404 return _error->Error(_("Error occurred while processing %s (%s%d)"),
405 Pkg.Name(), "NewVersion", 2);
406
407 if (unlikely(List.UsePackage(Pkg,Ver) == false))
408 return _error->Error(_("Error occurred while processing %s (%s%d)"),
409 Pkg.Name(), "UsePackage", 3);
410
411 if (unlikely(NewFileVer(Ver,List) == false))
412 return _error->Error(_("Error occurred while processing %s (%s%d)"),
413 Pkg.Name(), "NewFileVer", 2);
414
415 pkgCache::GrpIterator Grp = Pkg.Group();
416 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
417
418 /* If it is the first version of this package we need to add implicit
419 Multi-Arch dependencies to all other package versions in the group now -
420 otherwise we just add them for this new version */
421 if (Pkg.VersionList()->NextVer == 0)
422 {
423 pkgCache::PkgIterator P = Grp.PackageList();
424 Dynamic<pkgCache::PkgIterator> DynP(P);
425 for (; P.end() != true; P = Grp.NextPkg(P))
426 {
427 if (P->ID == Pkg->ID)
428 continue;
429 pkgCache::VerIterator V = P.VersionList();
430 Dynamic<pkgCache::VerIterator> DynV(V);
431 for (; V.end() != true; ++V)
432 if (unlikely(AddImplicitDepends(V, Pkg) == false))
433 return _error->Error(_("Error occurred while processing %s (%s%d)"),
434 Pkg.Name(), "AddImplicitDepends", 1);
435 }
436 /* :none packages are packages without an architecture. They are forbidden by
437 debian-policy, so usually they will only be in (old) dpkg status files -
438 and dpkg will complain about them - and are pretty rare. We therefore
439 usually do not create conflicts while the parent is created, but only if a :none
440 package (= the target) appears. This creates incorrect dependencies on :none
441 for architecture-specific dependencies on the package we copy from, but we
442 will ignore this bug as architecture-specific dependencies are only allowed
443 in jessie and until then the :none packages should be extinct (hopefully).
444 In other words: This should work long enough to allow graceful removal of
445 these packages, it is not supposed to allow users to keep using them … */
446 if (strcmp(Pkg.Arch(), "none") == 0)
447 {
448 pkgCache::PkgIterator M = Grp.FindPreferredPkg();
449 if (M.end() == false && Pkg != M)
450 {
451 pkgCache::DepIterator D = M.RevDependsList();
452 Dynamic<pkgCache::DepIterator> DynD(D);
453 for (; D.end() == false; ++D)
454 {
455 if ((D->Type != pkgCache::Dep::Conflicts &&
456 D->Type != pkgCache::Dep::DpkgBreaks &&
457 D->Type != pkgCache::Dep::Replaces) ||
458 D.ParentPkg().Group() == Grp)
459 continue;
460
461 map_ptrloc *OldDepLast = NULL;
462 pkgCache::VerIterator ConVersion = D.ParentVer();
463 Dynamic<pkgCache::VerIterator> DynV(ConVersion);
464 // duplicate the Conflicts/Breaks/Replaces for :none arch
465 if (D->Version == 0)
466 NewDepends(Pkg, ConVersion, "", 0, D->Type, OldDepLast);
467 else
468 NewDepends(Pkg, ConVersion, D.TargetVer(),
469 D->CompareOp, D->Type, OldDepLast);
470 }
471 }
472 }
473 }
474 if (unlikely(AddImplicitDepends(Grp, Pkg, Ver) == false))
475 return _error->Error(_("Error occurred while processing %s (%s%d)"),
476 Pkg.Name(), "AddImplicitDepends", 2);
477
478 // Read only a single record and return
479 if (OutVer != 0)
480 {
481 *OutVer = Ver;
482 return true;
483 }
484
485 /* Record the Description (it is not translated) */
486 MD5SumValue CurMd5 = List.Description_md5();
487 if (CurMd5.Value().empty() == true)
488 return true;
489 std::string CurLang = List.DescriptionLanguage();
490
491 /* Before we add a new description we first search in the group for
492 a version with a description of the same MD5 - if so we reuse this
493 description group instead of creating our own for this version */
494 for (pkgCache::PkgIterator P = Grp.PackageList();
495 P.end() == false; P = Grp.NextPkg(P))
496 {
497 for (pkgCache::VerIterator V = P.VersionList();
498 V.end() == false; ++V)
499 {
500 if (IsDuplicateDescription(V.DescriptionList(), CurMd5, "") == false)
501 continue;
502 Ver->DescriptionList = V->DescriptionList;
503 return true;
504 }
505 }
506
507 // We haven't found reusable descriptions, so add the first description
508 pkgCache::DescIterator Desc = Ver.DescriptionList();
509 Dynamic<pkgCache::DescIterator> DynDesc(Desc);
510 map_ptrloc *LastDesc = &Ver->DescriptionList;
511
512 oldMap = Map.Data();
513 map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, *LastDesc);
514 if (unlikely(descindex == 0 && _error->PendingError()))
515 return _error->Error(_("Error occurred while processing %s (%s%d)"),
516 Pkg.Name(), "NewDescription", 2);
517 if (oldMap != Map.Data())
518 LastDesc += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
519 *LastDesc = descindex;
520 Desc->ParentPkg = Pkg.Index();
521
522 if ((*LastDesc == 0 && _error->PendingError()) || NewFileDesc(Desc,List) == false)
523 return _error->Error(_("Error occurred while processing %s (%s%d)"),
524 Pkg.Name(), "NewFileDesc", 2);
525
526 return true;
527 }
528 /*}}}*/
529 /*}}}*/
530 // CacheGenerator::MergeFileProvides - Merge file provides /*{{{*/
531 // ---------------------------------------------------------------------
532 /* If we found any file depends while parsing the main list we need to
533 resolve them. Since it is undesired to load the entire list of files
534 into the cache as virtual packages we do a two-stage effort. MergeList
535 identifies the file depends and this creates Provides for them by
536 re-parsing all the indexes. */
537 bool pkgCacheGenerator::MergeFileProvides(ListParser &List)
538 {
539 List.Owner = this;
540
541 unsigned int Counter = 0;
542 while (List.Step() == true)
543 {
544 string PackageName = List.Package();
545 if (PackageName.empty() == true)
546 return false;
547 string Version = List.Version();
548 if (Version.empty() == true)
549 continue;
550
551 pkgCache::PkgIterator Pkg = Cache.FindPkg(PackageName);
552 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
553 if (Pkg.end() == true)
554 return _error->Error(_("Error occurred while processing %s (%s%d)"),
555 PackageName.c_str(), "FindPkg", 1);
556 Counter++;
557 if (Counter % 100 == 0 && Progress != 0)
558 Progress->Progress(List.Offset());
559
560 unsigned long Hash = List.VersionHash();
561 pkgCache::VerIterator Ver = Pkg.VersionList();
562 Dynamic<pkgCache::VerIterator> DynVer(Ver);
563 for (; Ver.end() == false; ++Ver)
564 {
565 if (Ver->Hash == Hash && Version == Ver.VerStr())
566 {
567 if (List.CollectFileProvides(Cache,Ver) == false)
568 return _error->Error(_("Error occurred while processing %s (%s%d)"),
569 PackageName.c_str(), "CollectFileProvides", 1);
570 break;
571 }
572 }
573
574 if (Ver.end() == true)
575 _error->Warning(_("Package %s %s was not found while processing file dependencies"),PackageName.c_str(),Version.c_str());
576 }
577
578 return true;
579 }
580 /*}}}*/
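// Two-pass sketch (informal, mirrors BuildCache() further down): the first
// pass over an index calls MergeList() and merely records HasFileDeps();
// only if some dependency target started with '/' does BuildCache() run a
// second pass calling MergeFileProvides(), so the matching versions gain
// the required Provides entries without loading every file name into the
// cache as a virtual package.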
581 // CacheGenerator::NewGroup - Add a new group /*{{{*/
582 // ---------------------------------------------------------------------
583 /* This creates a new group structure and adds it to the hash table */
584 bool pkgCacheGenerator::NewGroup(pkgCache::GrpIterator &Grp, const string &Name)
585 {
586 Grp = Cache.FindGrp(Name);
587 if (Grp.end() == false)
588 return true;
589
590 // Get a structure
591 map_ptrloc const Group = AllocateInMap(sizeof(pkgCache::Group));
592 if (unlikely(Group == 0))
593 return false;
594
595 Grp = pkgCache::GrpIterator(Cache, Cache.GrpP + Group);
596 map_ptrloc const idxName = WriteStringInMap(Name);
597 if (unlikely(idxName == 0))
598 return false;
599 Grp->Name = idxName;
600
601 // Insert it into the hash table
602 unsigned long const Hash = Cache.Hash(Name);
603 Grp->Next = Cache.HeaderP->GrpHashTable[Hash];
604 Cache.HeaderP->GrpHashTable[Hash] = Group;
605
606 Grp->ID = Cache.HeaderP->GroupCount++;
607 return true;
608 }
609 /*}}}*/
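// Note on the hash table: GrpHashTable stores map offsets rather than
// pointers, and an offset of 0 terminates a bucket chain (offset 0 is the
// cache header, so no real Group can live there). New groups are simply
// prepended to their bucket; pkgCache::FindGrp() walks Grp->Next until it
// reaches 0.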
610 // CacheGenerator::NewPackage - Add a new package /*{{{*/
611 // ---------------------------------------------------------------------
612 /* This creates a new package structure and adds it to the hash table */
613 bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name,
614 const string &Arch) {
615 pkgCache::GrpIterator Grp;
616 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
617 if (unlikely(NewGroup(Grp, Name) == false))
618 return false;
619
620 Pkg = Grp.FindPkg(Arch);
621 if (Pkg.end() == false)
622 return true;
623
624 // Get a structure
625 map_ptrloc const Package = AllocateInMap(sizeof(pkgCache::Package));
626 if (unlikely(Package == 0))
627 return false;
628 Pkg = pkgCache::PkgIterator(Cache,Cache.PkgP + Package);
629
630 // Insert the package into our package list
631 if (Grp->FirstPackage == 0) // the group is new
632 {
633 // Insert it into the hash table
634 unsigned long const Hash = Cache.Hash(Name);
635 Pkg->NextPackage = Cache.HeaderP->PkgHashTable[Hash];
636 Cache.HeaderP->PkgHashTable[Hash] = Package;
637 Grp->FirstPackage = Package;
638 }
639 else // Group the Packages together
640 {
641 // this package is the new last package
642 pkgCache::PkgIterator LastPkg(Cache, Cache.PkgP + Grp->LastPackage);
643 Pkg->NextPackage = LastPkg->NextPackage;
644 LastPkg->NextPackage = Package;
645 }
646 Grp->LastPackage = Package;
647
648 // Set the name, arch and the ID
649 Pkg->Name = Grp->Name;
650 Pkg->Group = Grp.Index();
651 // 'all' is mapped to the native architecture
652 map_ptrloc const idxArch = (Arch == "all") ? Cache.HeaderP->Architecture : WriteUniqString(Arch.c_str());
653 if (unlikely(idxArch == 0))
654 return false;
655 Pkg->Arch = idxArch;
656 Pkg->ID = Cache.HeaderP->PackageCount++;
657
658 return true;
659 }
660 /*}}}*/
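// Iteration sketch (the pattern used throughout this file): the packages
// of one group - one per architecture - are visited through the
// FirstPackage/LastPackage links maintained above:
//
//    pkgCache::PkgIterator P = Grp.PackageList();
//    for (; P.end() == false; P = Grp.NextPkg(P))
//       ; // one architecture of this group per iteration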
661 // CacheGenerator::AddImplicitDepends /*{{{*/
662 bool pkgCacheGenerator::AddImplicitDepends(pkgCache::GrpIterator &G,
663 pkgCache::PkgIterator &P,
664 pkgCache::VerIterator &V)
665 {
666 // copy P.Arch() into a string here as a cache remap
667 // in NewDepends() later may alter the pointer location
668 string Arch = P.Arch() == NULL ? "" : P.Arch();
669 map_ptrloc *OldDepLast = NULL;
670 /* MultiArch handling introduces a lot of implicit Dependencies:
671 - MultiArch: same → Co-Installable if they have the same version
672 - All others conflict with all other group members */
673 bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
674 pkgCache::PkgIterator D = G.PackageList();
675 Dynamic<pkgCache::PkgIterator> DynD(D);
676 for (; D.end() != true; D = G.NextPkg(D))
677 {
678 if (Arch == D.Arch() || D->VersionList == 0)
679 continue;
680 /* We allow only one installed arch at a time
681 per group, therefore each group member conflicts
682 with all other group members */
683 if (coInstall == true)
684 {
685 // Replaces: ${self}:other ( << ${binary:Version})
686 NewDepends(D, V, V.VerStr(),
687 pkgCache::Dep::Less, pkgCache::Dep::Replaces,
688 OldDepLast);
689 // Breaks: ${self}:other (!= ${binary:Version})
690 NewDepends(D, V, V.VerStr(),
691 pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
692 OldDepLast);
693 } else {
694 // Conflicts: ${self}:other
695 NewDepends(D, V, "",
696 pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
697 OldDepLast);
698 }
699 }
700 return true;
701 }
702 bool pkgCacheGenerator::AddImplicitDepends(pkgCache::VerIterator &V,
703 pkgCache::PkgIterator &D)
704 {
705 /* MultiArch handling introduces a lot of implicit Dependencies:
706 - MultiArch: same → Co-Installable if they have the same version
707 - All others conflict with all other group members */
708 map_ptrloc *OldDepLast = NULL;
709 bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
710 if (coInstall == true)
711 {
712 // Replaces: ${self}:other ( << ${binary:Version})
713 NewDepends(D, V, V.VerStr(),
714 pkgCache::Dep::Less, pkgCache::Dep::Replaces,
715 OldDepLast);
716 // Breaks: ${self}:other (!= ${binary:Version})
717 NewDepends(D, V, V.VerStr(),
718 pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
719 OldDepLast);
720 } else {
721 // Conflicts: ${self}:other
722 NewDepends(D, V, "",
723 pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
724 OldDepLast);
725 }
726 return true;
727 }
728
729 /*}}}*/
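// Example of the implicit dependencies created above (informal): a version
// libfoo:amd64 1.0 marked "Multi-Arch: same" gains, against every other
// architecture of libfoo in the group,
//    Replaces: libfoo:<other arch> (<< 1.0)
//    Breaks:   libfoo:<other arch> (!= 1.0)
// while a version without "Multi-Arch: same" gains a plain unversioned
// Conflicts instead, so only one architecture can be installed at a time.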
730 // CacheGenerator::NewFileVer - Create a new File<->Version association /*{{{*/
731 // ---------------------------------------------------------------------
732 /* */
733 bool pkgCacheGenerator::NewFileVer(pkgCache::VerIterator &Ver,
734 ListParser &List)
735 {
736 if (CurrentFile == 0)
737 return true;
738
739 // Get a structure
740 map_ptrloc const VerFile = AllocateInMap(sizeof(pkgCache::VerFile));
741 if (VerFile == 0)
742 return false;
743
744 pkgCache::VerFileIterator VF(Cache,Cache.VerFileP + VerFile);
745 VF->File = CurrentFile - Cache.PkgFileP;
746
747 // Link it to the end of the list
748 map_ptrloc *Last = &Ver->FileList;
749 for (pkgCache::VerFileIterator V = Ver.FileList(); V.end() == false; ++V)
750 Last = &V->NextFile;
751 VF->NextFile = *Last;
752 *Last = VF.Index();
753
754 VF->Offset = List.Offset();
755 VF->Size = List.Size();
756 if (Cache.HeaderP->MaxVerFileSize < VF->Size)
757 Cache.HeaderP->MaxVerFileSize = VF->Size;
758 Cache.HeaderP->VerFileCount++;
759
760 return true;
761 }
762 /*}}}*/
763 // CacheGenerator::NewVersion - Create a new Version /*{{{*/
764 // ---------------------------------------------------------------------
765 /* This puts a version structure in the linked list */
766 unsigned long pkgCacheGenerator::NewVersion(pkgCache::VerIterator &Ver,
767 const string &VerStr,
768 unsigned long Next)
769 {
770 // Get a structure
771 map_ptrloc const Version = AllocateInMap(sizeof(pkgCache::Version));
772 if (Version == 0)
773 return 0;
774
775 // Fill it in
776 Ver = pkgCache::VerIterator(Cache,Cache.VerP + Version);
777 Dynamic<pkgCache::VerIterator> DynV(Ver);
778 Ver->NextVer = Next;
779 Ver->ID = Cache.HeaderP->VersionCount++;
780 map_ptrloc const idxVerStr = WriteStringInMap(VerStr);
781 if (unlikely(idxVerStr == 0))
782 return 0;
783 Ver->VerStr = idxVerStr;
784
785 return Version;
786 }
787 /*}}}*/
788 // CacheGenerator::NewFileDesc - Create a new File<->Desc association /*{{{*/
789 // ---------------------------------------------------------------------
790 /* */
791 bool pkgCacheGenerator::NewFileDesc(pkgCache::DescIterator &Desc,
792 ListParser &List)
793 {
794 if (CurrentFile == 0)
795 return true;
796
797 // Get a structure
798 map_ptrloc const DescFile = AllocateInMap(sizeof(pkgCache::DescFile));
799 if (DescFile == 0)
800 return false;
801
802 pkgCache::DescFileIterator DF(Cache,Cache.DescFileP + DescFile);
803 DF->File = CurrentFile - Cache.PkgFileP;
804
805 // Link it to the end of the list
806 map_ptrloc *Last = &Desc->FileList;
807 for (pkgCache::DescFileIterator D = Desc.FileList(); D.end() == false; ++D)
808 Last = &D->NextFile;
809
810 DF->NextFile = *Last;
811 *Last = DF.Index();
812
813 DF->Offset = List.Offset();
814 DF->Size = List.Size();
815 if (Cache.HeaderP->MaxDescFileSize < DF->Size)
816 Cache.HeaderP->MaxDescFileSize = DF->Size;
817 Cache.HeaderP->DescFileCount++;
818
819 return true;
820 }
821 /*}}}*/
822 // CacheGenerator::NewDescription - Create a new Description /*{{{*/
823 // ---------------------------------------------------------------------
824 /* This puts a description structure in the linked list */
825 map_ptrloc pkgCacheGenerator::NewDescription(pkgCache::DescIterator &Desc,
826 const string &Lang,
827 const MD5SumValue &md5sum,
828 map_ptrloc Next)
829 {
830 // Get a structure
831 map_ptrloc const Description = AllocateInMap(sizeof(pkgCache::Description));
832 if (Description == 0)
833 return 0;
834
835 // Fill it in
836 Desc = pkgCache::DescIterator(Cache,Cache.DescP + Description);
837 Desc->NextDesc = Next;
838 Desc->ID = Cache.HeaderP->DescriptionCount++;
839 map_ptrloc const idxlanguage_code = WriteStringInMap(Lang);
840 map_ptrloc const idxmd5sum = WriteStringInMap(md5sum.Value());
841 if (unlikely(idxlanguage_code == 0 || idxmd5sum == 0))
842 return 0;
843 Desc->language_code = idxlanguage_code;
844 Desc->md5sum = idxmd5sum;
845
846 return Description;
847 }
848 /*}}}*/
849 // CacheGenerator::NewDepends - Create a dependency element /*{{{*/
850 // ---------------------------------------------------------------------
851 /* This creates a dependency element in the tree. It is linked to the
852 version and to the package that it is pointing to. */
853 bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
854 pkgCache::VerIterator &Ver,
855 string const &Version,
856 unsigned int const &Op,
857 unsigned int const &Type,
858 map_ptrloc* &OldDepLast)
859 {
860 void const * const oldMap = Map.Data();
861 // Get a structure
862 map_ptrloc const Dependency = AllocateInMap(sizeof(pkgCache::Dependency));
863 if (unlikely(Dependency == 0))
864 return false;
865
866 // Fill it in
867 pkgCache::DepIterator Dep(Cache,Cache.DepP + Dependency);
868 Dynamic<pkgCache::DepIterator> DynDep(Dep);
869 Dep->ParentVer = Ver.Index();
870 Dep->Type = Type;
871 Dep->CompareOp = Op;
872 Dep->ID = Cache.HeaderP->DependsCount++;
873
874 // Probe the reverse dependency list for a version string that matches
875 if (Version.empty() == false)
876 {
877 /* for (pkgCache::DepIterator I = Pkg.RevDependsList(); I.end() == false; I++)
878 if (I->Version != 0 && I.TargetVer() == Version)
879 Dep->Version = I->Version;*/
880 if (Dep->Version == 0) {
881 map_ptrloc const index = WriteStringInMap(Version);
882 if (unlikely(index == 0))
883 return false;
884 Dep->Version = index;
885 }
886 }
887
888 // Link it to the package
889 Dep->Package = Pkg.Index();
890 Dep->NextRevDepends = Pkg->RevDepends;
891 Pkg->RevDepends = Dep.Index();
892
893 // Do we know where to link the Dependency to?
894 if (OldDepLast == NULL)
895 {
896 OldDepLast = &Ver->DependsList;
897 for (pkgCache::DepIterator D = Ver.DependsList(); D.end() == false; ++D)
898 OldDepLast = &D->NextDepends;
899 } else if (oldMap != Map.Data())
900 OldDepLast += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
901
902 Dep->NextDepends = *OldDepLast;
903 *OldDepLast = Dep.Index();
904 OldDepLast = &Dep->NextDepends;
905
906 return true;
907 }
908 /*}}}*/
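// Note on OldDepLast (see ListParser::NewDepends() below): the first
// dependency added to a version walks its DependsList once to find the
// tail; afterwards the cached slot lets further dependencies of the same
// version append in O(1). As the slot lives inside the map it is re-based
// after a remap exactly like LastVer/LastDesc above:
//
//    OldDepLast += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;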
909 // ListParser::NewDepends - Create the environment for a new dependency /*{{{*/
910 // ---------------------------------------------------------------------
911 /* This creates a Group and the Package to link this dependency to if
912 needed and also handles the caching of the old endpoint */
913 bool pkgCacheGenerator::ListParser::NewDepends(pkgCache::VerIterator &Ver,
914 const string &PackageName,
915 const string &Arch,
916 const string &Version,
917 unsigned int Op,
918 unsigned int Type)
919 {
920 pkgCache::GrpIterator Grp;
921 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
922 if (unlikely(Owner->NewGroup(Grp, PackageName) == false))
923 return false;
924
925 // Locate the target package
926 pkgCache::PkgIterator Pkg = Grp.FindPkg(Arch);
927 // we don't create 'none' packages and their dependencies if we can avoid it …
928 if (Pkg.end() == true && Arch == "none" && strcmp(Ver.ParentPkg().Arch(), "none") != 0)
929 return true;
930 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
931 if (Pkg.end() == true) {
932 if (unlikely(Owner->NewPackage(Pkg, PackageName, Arch) == false))
933 return false;
934 }
935
936 // Is it a file dependency?
937 if (unlikely(PackageName[0] == '/'))
938 FoundFileDeps = true;
939
940 /* Caching the old end point speeds up generation substantially */
941 if (OldDepVer != Ver) {
942 OldDepLast = NULL;
943 OldDepVer = Ver;
944 }
945
946 return Owner->NewDepends(Pkg, Ver, Version, Op, Type, OldDepLast);
947 }
948 /*}}}*/
949 // ListParser::NewProvides - Create a Provides element /*{{{*/
950 // ---------------------------------------------------------------------
951 /* */
952 bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator &Ver,
953 const string &PkgName,
954 const string &PkgArch,
955 const string &Version)
956 {
957 pkgCache &Cache = Owner->Cache;
958
959 // We do not add self-referencing provides
960 if (Ver.ParentPkg().Name() == PkgName && (PkgArch == Ver.ParentPkg().Arch() ||
961 (PkgArch == "all" && strcmp((Cache.StrP + Cache.HeaderP->Architecture), Ver.ParentPkg().Arch()) == 0)))
962 return true;
963
964 // Get a structure
965 map_ptrloc const Provides = Owner->AllocateInMap(sizeof(pkgCache::Provides));
966 if (unlikely(Provides == 0))
967 return false;
968 Cache.HeaderP->ProvidesCount++;
969
970 // Fill it in
971 pkgCache::PrvIterator Prv(Cache,Cache.ProvideP + Provides,Cache.PkgP);
972 Dynamic<pkgCache::PrvIterator> DynPrv(Prv);
973 Prv->Version = Ver.Index();
974 Prv->NextPkgProv = Ver->ProvidesList;
975 Ver->ProvidesList = Prv.Index();
976 if (Version.empty() == false && unlikely((Prv->ProvideVersion = WriteString(Version)) == 0))
977 return false;
978
979 // Locate the target package
980 pkgCache::PkgIterator Pkg;
981 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
982 if (unlikely(Owner->NewPackage(Pkg,PkgName, PkgArch) == false))
983 return false;
984
985 // Link it to the package
986 Prv->ParentPkg = Pkg.Index();
987 Prv->NextProvides = Pkg->ProvidesList;
988 Pkg->ProvidesList = Prv.Index();
989
990 return true;
991 }
992 /*}}}*/
993 // CacheGenerator::SelectFile - Select the current file being parsed /*{{{*/
994 // ---------------------------------------------------------------------
995 /* This is used to select which file is to be associated with all newly
996 added versions. The caller is responsible for setting the IMS fields. */
997 bool pkgCacheGenerator::SelectFile(const string &File,const string &Site,
998 const pkgIndexFile &Index,
999 unsigned long Flags)
1000 {
1001 // Get some space for the structure
1002 map_ptrloc const idxFile = AllocateInMap(sizeof(*CurrentFile));
1003 if (unlikely(idxFile == 0))
1004 return false;
1005 CurrentFile = Cache.PkgFileP + idxFile;
1006
1007 // Fill it in
1008 map_ptrloc const idxFileName = WriteStringInMap(File);
1009 map_ptrloc const idxSite = WriteUniqString(Site);
1010 if (unlikely(idxFileName == 0 || idxSite == 0))
1011 return false;
1012 CurrentFile->FileName = idxFileName;
1013 CurrentFile->Site = idxSite;
1014 CurrentFile->NextFile = Cache.HeaderP->FileList;
1015 CurrentFile->Flags = Flags;
1016 CurrentFile->ID = Cache.HeaderP->PackageFileCount;
1017 map_ptrloc const idxIndexType = WriteUniqString(Index.GetType()->Label);
1018 if (unlikely(idxIndexType == 0))
1019 return false;
1020 CurrentFile->IndexType = idxIndexType;
1021 PkgFileName = File;
1022 Cache.HeaderP->FileList = CurrentFile - Cache.PkgFileP;
1023 Cache.HeaderP->PackageFileCount++;
1024
1025 if (Progress != 0)
1026 Progress->SubProgress(Index.Size());
1027 return true;
1028 }
1029 /*}}}*/
1030 // CacheGenerator::WriteUniqueString - Insert a unique string /*{{{*/
1031 // ---------------------------------------------------------------------
1032 /* This is used to create handles to strings. Given the same text it
1033 always returns the same number */
1034 unsigned long pkgCacheGenerator::WriteUniqString(const char *S,
1035 unsigned int Size)
1036 {
1037 /* We use a very small transient hash table here; this speeds up generation
1038 by a fair amount on slower machines */
1039 pkgCache::StringItem *&Bucket = UniqHash[(S[0]*5 + S[1]) % _count(UniqHash)];
1040 if (Bucket != 0 &&
1041 stringcmp(S,S+Size,Cache.StrP + Bucket->String) == 0)
1042 return Bucket->String;
1043
1044 // Search for an insertion point
1045 pkgCache::StringItem *I = Cache.StringItemP + Cache.HeaderP->StringList;
1046 int Res = 1;
1047 map_ptrloc *Last = &Cache.HeaderP->StringList;
1048 for (; I != Cache.StringItemP; Last = &I->NextItem,
1049 I = Cache.StringItemP + I->NextItem)
1050 {
1051 Res = stringcmp(S,S+Size,Cache.StrP + I->String);
1052 if (Res >= 0)
1053 break;
1054 }
1055
1056 // Match
1057 if (Res == 0)
1058 {
1059 Bucket = I;
1060 return I->String;
1061 }
1062
1063 // Get a structure
1064 void const * const oldMap = Map.Data();
1065 map_ptrloc const Item = AllocateInMap(sizeof(pkgCache::StringItem));
1066 if (Item == 0)
1067 return 0;
1068
1069 map_ptrloc const idxString = WriteStringInMap(S,Size);
1070 if (unlikely(idxString == 0))
1071 return 0;
1072 if (oldMap != Map.Data()) {
1073 Last += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
1074 I += (pkgCache::StringItem*) Map.Data() - (pkgCache::StringItem*) oldMap;
1075 }
1076 *Last = Item;
1077
1078 // Fill in the structure
1079 pkgCache::StringItem *ItemP = Cache.StringItemP + Item;
1080 ItemP->NextItem = I - Cache.StringItemP;
1081 ItemP->String = idxString;
1082
1083 Bucket = ItemP;
1084 return ItemP->String;
1085 }
1086 /*}}}*/
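// Usage note: WriteUniqString() deduplicates strings that repeat often
// (architectures, sites, index type labels - see NewPackage() and
// SelectFile()), whereas WriteStringInMap() always appends a fresh copy.
// The UniqHash[] bucket cache is purely a transient, generation-time
// accelerator in front of the sorted StringItem list stored in the cache.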
1087 // CheckValidity - Check that a cache is up-to-date /*{{{*/
1088 // ---------------------------------------------------------------------
1089 /* This just verifies that each file in the list of index files exists,
1090 has attributes matching the cache, and that the cache does not have
1091 any extra files. */
1092 static bool CheckValidity(const string &CacheFile,
1093 pkgSourceList &List,
1094 FileIterator Start,
1095 FileIterator End,
1096 MMap **OutMap = 0)
1097 {
1098 bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
1099 // No file, certainly invalid
1100 if (CacheFile.empty() == true || FileExists(CacheFile) == false)
1101 {
1102 if (Debug == true)
1103 std::clog << "CacheFile doesn't exist" << std::endl;
1104 return false;
1105 }
1106
1107 if (List.GetLastModifiedTime() > GetModificationTime(CacheFile))
1108 {
1109 if (Debug == true)
1110 std::clog << "sources.list is newer than the cache" << std::endl;
1111 return false;
1112 }
1113
1114 // Map it
1115 FileFd CacheF(CacheFile,FileFd::ReadOnly);
1116 SPtr<MMap> Map = new MMap(CacheF,0);
1117 pkgCache Cache(Map);
1118 if (_error->PendingError() == true || Map->Size() == 0)
1119 {
1120 if (Debug == true)
1121 std::clog << "Errors are pending or Map is empty()" << std::endl;
1122 _error->Discard();
1123 return false;
1124 }
1125
1126 /* Now we check every index file, see if it is in the cache,
1127 verify the IMS data and check that it is on the disk too.. */
1128 SPtrArray<bool> Visited = new bool[Cache.HeaderP->PackageFileCount];
1129 memset(Visited,0,sizeof(*Visited)*Cache.HeaderP->PackageFileCount);
1130 for (; Start != End; ++Start)
1131 {
1132 if (Debug == true)
1133 std::clog << "Checking PkgFile " << (*Start)->Describe() << ": ";
1134 if ((*Start)->HasPackages() == false)
1135 {
1136 if (Debug == true)
1137 std::clog << "Has NO packages" << std::endl;
1138 continue;
1139 }
1140
1141 if ((*Start)->Exists() == false)
1142 {
1143 #if 0 // mvo: we no longer give a message here (Default Sources spec)
1144 _error->WarningE("stat",_("Couldn't stat source package list %s"),
1145 (*Start)->Describe().c_str());
1146 #endif
1147 if (Debug == true)
1148 std::clog << "file doesn't exist" << std::endl;
1149 continue;
1150 }
1151
1152 // FindInCache is also expected to do an IMS check.
1153 pkgCache::PkgFileIterator File = (*Start)->FindInCache(Cache);
1154 if (File.end() == true)
1155 {
1156 if (Debug == true)
1157 std::clog << "FindInCache returned end-Pointer" << std::endl;
1158 return false;
1159 }
1160
1161 Visited[File->ID] = true;
1162 if (Debug == true)
1163 std::clog << "with ID " << File->ID << " is valid" << std::endl;
1164 }
1165
1166 for (unsigned I = 0; I != Cache.HeaderP->PackageFileCount; I++)
1167 if (Visited[I] == false)
1168 {
1169 if (Debug == true)
1170 std::clog << "File with ID " << I << " wasn't visited" << std::endl;
1171 return false;
1172 }
1173
1174 if (_error->PendingError() == true)
1175 {
1176 if (Debug == true)
1177 {
1178 std::clog << "Validity failed because of pending errors:" << std::endl;
1179 _error->DumpErrors();
1180 }
1181 _error->Discard();
1182 return false;
1183 }
1184
1185 if (OutMap != 0)
1186 *OutMap = Map.UnGuard();
1187 return true;
1188 }
1189 /*}}}*/
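// In short, the cache is rebuilt whenever pkgcache.bin is missing, is older
// than sources.list, lacks a configured index file, still references a file
// that is no longer configured, or an IMS check in FindInCache() fails.
// The decision can be traced with this apt.conf setting (sketch):
//
//    Debug::pkgCacheGen "true";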
1190 // ComputeSize - Compute the total size of a bunch of files /*{{{*/
1191 // ---------------------------------------------------------------------
1192 /* Size is kind of an abstract notion that is only used for the progress
1193 meter */
1194 static unsigned long ComputeSize(FileIterator Start,FileIterator End)
1195 {
1196 unsigned long TotalSize = 0;
1197 for (; Start != End; ++Start)
1198 {
1199 if ((*Start)->HasPackages() == false)
1200 continue;
1201 TotalSize += (*Start)->Size();
1202 }
1203 return TotalSize;
1204 }
1205 /*}}}*/
1206 // BuildCache - Merge the list of index files into the cache /*{{{*/
1207 // ---------------------------------------------------------------------
1208 /* */
1209 static bool BuildCache(pkgCacheGenerator &Gen,
1210 OpProgress *Progress,
1211 unsigned long &CurrentSize,unsigned long TotalSize,
1212 FileIterator Start, FileIterator End)
1213 {
1214 FileIterator I;
1215 for (I = Start; I != End; ++I)
1216 {
1217 if ((*I)->HasPackages() == false)
1218 continue;
1219
1220 if ((*I)->Exists() == false)
1221 continue;
1222
1223 if ((*I)->FindInCache(Gen.GetCache()).end() == false)
1224 {
1225 _error->Warning("Duplicate sources.list entry %s",
1226 (*I)->Describe().c_str());
1227 continue;
1228 }
1229
1230 unsigned long Size = (*I)->Size();
1231 if (Progress != NULL)
1232 Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Reading package lists"));
1233 CurrentSize += Size;
1234
1235 if ((*I)->Merge(Gen,Progress) == false)
1236 return false;
1237 }
1238
1239 if (Gen.HasFileDeps() == true)
1240 {
1241 if (Progress != NULL)
1242 Progress->Done();
1243 TotalSize = ComputeSize(Start, End);
1244 CurrentSize = 0;
1245 for (I = Start; I != End; ++I)
1246 {
1247 unsigned long Size = (*I)->Size();
1248 if (Progress != NULL)
1249 Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Collecting File Provides"));
1250 CurrentSize += Size;
1251 if ((*I)->MergeFileProvides(Gen,Progress) == false)
1252 return false;
1253 }
1254 }
1255
1256 return true;
1257 }
1258 /*}}}*/
1259 // CacheGenerator::CreateDynamicMMap - load an mmap with configuration options /*{{{*/
1260 DynamicMMap* pkgCacheGenerator::CreateDynamicMMap(FileFd *CacheF, unsigned long Flags) {
1261 unsigned long const MapStart = _config->FindI("APT::Cache-Start", 24*1024*1024);
1262 unsigned long const MapGrow = _config->FindI("APT::Cache-Grow", 1*1024*1024);
1263 unsigned long const MapLimit = _config->FindI("APT::Cache-Limit", 0);
1264 Flags |= MMap::Moveable;
1265 if (_config->FindB("APT::Cache-Fallback", false) == true)
1266 Flags |= MMap::Fallback;
1267 if (CacheF != NULL)
1268 return new DynamicMMap(*CacheF, Flags, MapStart, MapGrow, MapLimit);
1269 else
1270 return new DynamicMMap(Flags, MapStart, MapGrow, MapLimit);
1271 }
1272 /*}}}*/
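// The sizing knobs read above map to these apt.conf settings (the values
// shown are the defaults used in this function):
//
//    APT::Cache-Start "25165824";     // 24*1024*1024, initial map size
//    APT::Cache-Grow  "1048576";      // step by which the map may grow
//    APT::Cache-Limit "0";            // 0 = no upper bound
//    APT::Cache-Fallback "false";     // true: use the non-mmap fallback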
1273 // CacheGenerator::MakeStatusCache - Construct the status cache /*{{{*/
1274 // ---------------------------------------------------------------------
1275 /* This makes sure that the status cache (the cache that has all
1276 index files from the sources list and all local ones) is ready
1277 to be mmapped. If OutMap is not zero then a MMap object representing
1278 the cache will be stored there. This is pretty much mandatory if you
1279 are using AllowMem. AllowMem lets the function be run as non-root,
1280 where it builds the cache 'fast' into a memory buffer. */
1281 __deprecated bool pkgMakeStatusCache(pkgSourceList &List,OpProgress &Progress,
1282 MMap **OutMap, bool AllowMem)
1283 { return pkgCacheGenerator::MakeStatusCache(List, &Progress, OutMap, AllowMem); }
1284 bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress,
1285 MMap **OutMap,bool AllowMem)
1286 {
1287 bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
1288
1289 std::vector<pkgIndexFile *> Files;
1290 for (std::vector<metaIndex *>::const_iterator i = List.begin();
1291 i != List.end();
1292 ++i)
1293 {
1294 std::vector <pkgIndexFile *> *Indexes = (*i)->GetIndexFiles();
1295 for (std::vector<pkgIndexFile *>::const_iterator j = Indexes->begin();
1296 j != Indexes->end();
1297 ++j)
1298 Files.push_back (*j);
1299 }
1300
1301 unsigned long const EndOfSource = Files.size();
1302 if (_system->AddStatusFiles(Files) == false)
1303 return false;
1304
1305 // Decide if we can write to the files..
1306 string const CacheFile = _config->FindFile("Dir::Cache::pkgcache");
1307 string const SrcCacheFile = _config->FindFile("Dir::Cache::srcpkgcache");
1308
1309 // ensure the cache directory exists
1310 if (CacheFile.empty() == false || SrcCacheFile.empty() == false)
1311 {
1312 string dir = _config->FindDir("Dir::Cache");
1313 size_t const len = dir.size();
1314 if (len > 5 && dir.find("/apt/", len - 6, 5) == len - 5)
1315 dir = dir.substr(0, len - 5);
1316 if (CacheFile.empty() == false)
1317 CreateDirectory(dir, flNotFile(CacheFile));
1318 if (SrcCacheFile.empty() == false)
1319 CreateDirectory(dir, flNotFile(SrcCacheFile));
1320 }
1321
1322 // Decide if we can write to the cache
1323 bool Writeable = false;
1324 if (CacheFile.empty() == false)
1325 Writeable = access(flNotFile(CacheFile).c_str(),W_OK) == 0;
1326 else
1327 if (SrcCacheFile.empty() == false)
1328 Writeable = access(flNotFile(SrcCacheFile).c_str(),W_OK) == 0;
1329 if (Debug == true)
1330 std::clog << "Do we have write-access to the cache files? " << (Writeable ? "YES" : "NO") << std::endl;
1331
1332 if (Writeable == false && AllowMem == false && CacheFile.empty() == false)
1333 return _error->Error(_("Unable to write to %s"),flNotFile(CacheFile).c_str());
1334
1335 if (Progress != NULL)
1336 Progress->OverallProgress(0,1,1,_("Reading package lists"));
1337
1338 // Cache is OK, Fin.
1339 if (CheckValidity(CacheFile, List, Files.begin(),Files.end(),OutMap) == true)
1340 {
1341 if (Progress != NULL)
1342 Progress->OverallProgress(1,1,1,_("Reading package lists"));
1343 if (Debug == true)
1344 std::clog << "pkgcache.bin is valid - no need to build anything" << std::endl;
1345 return true;
1346 }
1347 else if (Debug == true)
1348 std::clog << "pkgcache.bin is NOT valid" << std::endl;
1349
1350 /* At this point we know we need to reconstruct the package cache,
1351 begin. */
1352 SPtr<FileFd> CacheF;
1353 SPtr<DynamicMMap> Map;
1354 if (Writeable == true && CacheFile.empty() == false)
1355 {
1356 _error->PushToStack();
1357 unlink(CacheFile.c_str());
1358 CacheF = new FileFd(CacheFile,FileFd::WriteAtomic);
1359 fchmod(CacheF->Fd(),0644);
1360 Map = CreateDynamicMMap(CacheF, MMap::Public);
1361 if (_error->PendingError() == true)
1362 {
1363 delete CacheF.UnGuard();
1364 delete Map.UnGuard();
1365 if (Debug == true)
1366 std::clog << "Open filebased MMap FAILED" << std::endl;
1367 Writeable = false;
1368 if (AllowMem == false)
1369 {
1370 _error->MergeWithStack();
1371 return false;
1372 }
1373 _error->RevertToStack();
1374 }
1375 else
1376 {
1377 _error->MergeWithStack();
1378 if (Debug == true)
1379 std::clog << "Open filebased MMap" << std::endl;
1380 }
1381 }
1382 if (Writeable == false || CacheFile.empty() == true)
1383 {
1384 // Just build it in memory..
1385 Map = CreateDynamicMMap(NULL);
1386 if (Debug == true)
1387 std::clog << "Open memory Map (not filebased)" << std::endl;
1388 }
1389
1390 // Let's try the source cache.
1391 unsigned long CurrentSize = 0;
1392 unsigned long TotalSize = 0;
1393 if (CheckValidity(SrcCacheFile, List, Files.begin(),
1394 Files.begin()+EndOfSource) == true)
1395 {
1396 if (Debug == true)
1397 std::clog << "srcpkgcache.bin is valid - populate MMap with it." << std::endl;
1398 // Preload the map with the source cache
1399 FileFd SCacheF(SrcCacheFile,FileFd::ReadOnly);
1400 unsigned long const alloc = Map->RawAllocate(SCacheF.Size());
1401 if ((alloc == 0 && _error->PendingError())
1402 || SCacheF.Read((unsigned char *)Map->Data() + alloc,
1403 SCacheF.Size()) == false)
1404 return false;
1405
1406 TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());
1407
1408 // Build the status cache
1409 pkgCacheGenerator Gen(Map.Get(),Progress);
1410 if (_error->PendingError() == true)
1411 return false;
1412 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1413 Files.begin()+EndOfSource,Files.end()) == false)
1414 return false;
1415 }
1416 else
1417 {
1418 if (Debug == true)
1419 std::clog << "srcpkgcache.bin is NOT valid - rebuild" << std::endl;
1420 TotalSize = ComputeSize(Files.begin(),Files.end());
1421
1422 // Build the source cache
1423 pkgCacheGenerator Gen(Map.Get(),Progress);
1424 if (_error->PendingError() == true)
1425 return false;
1426 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1427 Files.begin(),Files.begin()+EndOfSource) == false)
1428 return false;
1429
1430 // Write it back
1431 if (Writeable == true && SrcCacheFile.empty() == false)
1432 {
1433 FileFd SCacheF(SrcCacheFile,FileFd::WriteAtomic);
1434 if (_error->PendingError() == true)
1435 return false;
1436
1437 fchmod(SCacheF.Fd(),0644);
1438
1439 // Write out the main data
1440 if (SCacheF.Write(Map->Data(),Map->Size()) == false)
1441 return _error->Error(_("IO Error saving source cache"));
1442 SCacheF.Sync();
1443
1444 // Write out the proper header
1445 Gen.GetCache().HeaderP->Dirty = false;
1446 if (SCacheF.Seek(0) == false ||
1447 SCacheF.Write(Map->Data(),sizeof(*Gen.GetCache().HeaderP)) == false)
1448 return _error->Error(_("IO Error saving source cache"));
1449 Gen.GetCache().HeaderP->Dirty = true;
1450 SCacheF.Sync();
1451 }
1452
1453 // Build the status cache
1454 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1455 Files.begin()+EndOfSource,Files.end()) == false)
1456 return false;
1457 }
1458 if (Debug == true)
1459 std::clog << "Caches are ready for shipping" << std::endl;
1460
1461 if (_error->PendingError() == true)
1462 return false;
1463 if (OutMap != 0)
1464 {
1465 if (CacheF != 0)
1466 {
1467 delete Map.UnGuard();
1468 *OutMap = new MMap(*CacheF,0);
1469 }
1470 else
1471 {
1472 *OutMap = Map.UnGuard();
1473 }
1474 }
1475
1476 return true;
1477 }
1478 /*}}}*/
1479 // CacheGenerator::MakeOnlyStatusCache - Build only a status files cache/*{{{*/
1480 // ---------------------------------------------------------------------
1481 /* */
1482 __deprecated bool pkgMakeOnlyStatusCache(OpProgress &Progress,DynamicMMap **OutMap)
1483 { return pkgCacheGenerator::MakeOnlyStatusCache(&Progress, OutMap); }
1484 bool pkgCacheGenerator::MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **OutMap)
1485 {
1486 std::vector<pkgIndexFile *> Files;
1487 unsigned long EndOfSource = Files.size();
1488 if (_system->AddStatusFiles(Files) == false)
1489 return false;
1490
1491 SPtr<DynamicMMap> Map = CreateDynamicMMap(NULL);
1492 unsigned long CurrentSize = 0;
1493 unsigned long TotalSize = 0;
1494
1495 TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());
1496
1497 // Build the status cache
1498 if (Progress != NULL)
1499 Progress->OverallProgress(0,1,1,_("Reading package lists"));
1500 pkgCacheGenerator Gen(Map.Get(),Progress);
1501 if (_error->PendingError() == true)
1502 return false;
1503 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1504 Files.begin()+EndOfSource,Files.end()) == false)
1505 return false;
1506
1507 if (_error->PendingError() == true)
1508 return false;
1509 *OutMap = Map.UnGuard();
1510
1511 return true;
1512 }
1513 /*}}}*/
1514 // IsDuplicateDescription /*{{{*/
1515 static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
1516 MD5SumValue const &CurMd5, std::string const &CurLang)
1517 {
1518 // Descriptions in the same linked list all have the same md5
1519 if (Desc.end() == true || MD5SumValue(Desc.md5()) != CurMd5)
1520 return false;
1521 for (; Desc.end() == false; ++Desc)
1522 if (Desc.LanguageCode() == CurLang)
1523 return true;
1524 return false;
1525 }
1526 /*}}}*/
1527 // CacheGenerator::FinishCache /*{{{*/
1528 bool pkgCacheGenerator::FinishCache(OpProgress *Progress)
1529 {
1530 return true;
1531 }
1532 /*}}}*/