Factor version string creation out of NewDepends, so we can easily reuse it
[ntk/apt.git] / apt-pkg / pkgcachegen.cc
1 // -*- mode: cpp; mode: fold -*-
2 // Description /*{{{*/
3 // $Id: pkgcachegen.cc,v 1.53.2.1 2003/12/24 23:09:17 mdz Exp $
4 /* ######################################################################
5
6 Package Cache Generator - Generator for the cache structure.
7
8 This builds the cache structure from the abstract package list parser.
9
10 ##################################################################### */
11 /*}}}*/
12 // Include Files /*{{{*/
13 #include <config.h>
14
15 #include <apt-pkg/pkgcachegen.h>
16 #include <apt-pkg/error.h>
17 #include <apt-pkg/version.h>
18 #include <apt-pkg/progress.h>
19 #include <apt-pkg/sourcelist.h>
20 #include <apt-pkg/configuration.h>
21 #include <apt-pkg/aptconfiguration.h>
22 #include <apt-pkg/strutl.h>
23 #include <apt-pkg/sptr.h>
24 #include <apt-pkg/pkgsystem.h>
25 #include <apt-pkg/macros.h>
26 #include <apt-pkg/tagfile.h>
27 #include <apt-pkg/metaindex.h>
28 #include <apt-pkg/fileutl.h>
29
30 #include <vector>
31 #include <sys/stat.h>
32 #include <unistd.h>
33 #include <errno.h>
34 #include <stdio.h>
35
36 #include <apti18n.h>
37 /*}}}*/
// Shorthand for walking the caller-supplied list of index files.
typedef std::vector<pkgIndexFile *>::iterator FileIterator;
// Per-iterator-type registry of live iterators that must be re-anchored
// whenever the cache mmap is moved by a reallocation (see ReMap below).
template <typename Iter> std::vector<Iter*> pkgCacheGenerator::Dynamic<Iter>::toReMap;

// Forward declaration: true if Desc already records a description with
// this MD5 and language (used to avoid duplicate description records).
static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
   MD5SumValue const &CurMd5, std::string const &CurLang);

using std::string;
// CacheGenerator::pkgCacheGenerator - Constructor			/*{{{*/
// ---------------------------------------------------------------------
/* We set the dirty flag and make sure that is written to the disk.
   Two cases: an empty map gets a fresh header written into it, while a
   non-empty map is re-attached to the existing on-disk structure. */
pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) :
		    Map(*pMap), Cache(pMap,false), Progress(Prog),
		    FoundFileDeps(0)
{
   CurrentFile = 0;
   memset(UniqHash,0,sizeof(UniqHash));

   if (_error->PendingError() == true)
      return;

   if (Map.Size() == 0)
   {
      // Setup the map interface..
      Cache.HeaderP = (pkgCache::Header *)Map.Data();
      if (Map.RawAllocate(sizeof(pkgCache::Header)) == 0 && _error->PendingError() == true)
	 return;

      Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));

      // Starting header
      *Cache.HeaderP = pkgCache::Header();
      // NOTE: WriteStringInMap may grow (and therefore move) the map, so
      // the header fields are set from the returned offsets, not pointers.
      map_ptrloc const idxVerSysName = WriteStringInMap(_system->VS->Label);
      Cache.HeaderP->VerSysName = idxVerSysName;
      // this pointer is set in ReMap, but we need it now for WriteUniqString
      Cache.StringItemP = (pkgCache::StringItem *)Map.Data();
      map_ptrloc const idxArchitecture = WriteUniqString(_config->Find("APT::Architecture"));
      Cache.HeaderP->Architecture = idxArchitecture;
      // a zero offset signals a failed write; leave the error pending
      if (unlikely(idxVerSysName == 0 || idxArchitecture == 0))
	 return;
      Cache.ReMap();
   }
   else
   {
      // Map directly from the existing file
      Cache.ReMap();
      Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
      if (Cache.VS != _system->VS)
      {
	 _error->Error(_("Cache has an incompatible versioning system"));
	 return;
      }
   }

   // Mark the cache dirty until the destructor finishes it cleanly
   Cache.HeaderP->Dirty = true;
   Map.Sync(0,sizeof(pkgCache::Header));
}
									/*}}}*/
96 // CacheGenerator::~pkgCacheGenerator - Destructor /*{{{*/
97 // ---------------------------------------------------------------------
98 /* We sync the data then unset the dirty flag in two steps so as to
99 advoid a problem during a crash */
100 pkgCacheGenerator::~pkgCacheGenerator()
101 {
102 if (_error->PendingError() == true)
103 return;
104 if (Map.Sync() == false)
105 return;
106
107 Cache.HeaderP->Dirty = false;
108 Cache.HeaderP->CacheFileSize = Map.Size();
109 Map.Sync(0,sizeof(pkgCache::Header));
110 }
111 /*}}}*/
// CacheGenerator::ReMap - rebase all raw pointers after a map move	/*{{{*/
// Called whenever the DynamicMMap reallocates: every raw pointer into
// the old mapping is shifted by the (typed) distance between the two
// base addresses, and every registered Dynamic<> iterator is re-anchored.
void pkgCacheGenerator::ReMap(void const * const oldMap, void const * const newMap) {
   if (oldMap == newMap)
      return;

   if (_config->FindB("Debug::pkgCacheGen", false))
      std::clog << "Remaping from " << oldMap << " to " << newMap << std::endl;

   // refresh the pkgCache-internal pointers without re-checking the header
   Cache.ReMap(false);

   CurrentFile += (pkgCache::PackageFile*) newMap - (pkgCache::PackageFile*) oldMap;

   for (size_t i = 0; i < _count(UniqHash); ++i)
      if (UniqHash[i] != 0)
	 UniqHash[i] += (pkgCache::StringItem*) newMap - (pkgCache::StringItem*) oldMap;

   // re-anchor every live iterator of every iterator type
   for (std::vector<pkgCache::GrpIterator*>::const_iterator i = Dynamic<pkgCache::GrpIterator>::toReMap.begin();
	i != Dynamic<pkgCache::GrpIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::PkgIterator*>::const_iterator i = Dynamic<pkgCache::PkgIterator>::toReMap.begin();
	i != Dynamic<pkgCache::PkgIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::VerIterator*>::const_iterator i = Dynamic<pkgCache::VerIterator>::toReMap.begin();
	i != Dynamic<pkgCache::VerIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::DepIterator*>::const_iterator i = Dynamic<pkgCache::DepIterator>::toReMap.begin();
	i != Dynamic<pkgCache::DepIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::DescIterator*>::const_iterator i = Dynamic<pkgCache::DescIterator>::toReMap.begin();
	i != Dynamic<pkgCache::DescIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::PrvIterator*>::const_iterator i = Dynamic<pkgCache::PrvIterator>::toReMap.begin();
	i != Dynamic<pkgCache::PrvIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::PkgFileIterator*>::const_iterator i = Dynamic<pkgCache::PkgFileIterator>::toReMap.begin();
	i != Dynamic<pkgCache::PkgFileIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
}									/*}}}*/
149 // CacheGenerator::WriteStringInMap /*{{{*/
150 map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String,
151 const unsigned long &Len) {
152 void const * const oldMap = Map.Data();
153 map_ptrloc const index = Map.WriteString(String, Len);
154 if (index != 0)
155 ReMap(oldMap, Map.Data());
156 return index;
157 }
158 /*}}}*/
159 // CacheGenerator::WriteStringInMap /*{{{*/
160 map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String) {
161 void const * const oldMap = Map.Data();
162 map_ptrloc const index = Map.WriteString(String);
163 if (index != 0)
164 ReMap(oldMap, Map.Data());
165 return index;
166 }
167 /*}}}*/
168 map_ptrloc pkgCacheGenerator::AllocateInMap(const unsigned long &size) {/*{{{*/
169 void const * const oldMap = Map.Data();
170 map_ptrloc const index = Map.Allocate(size);
171 if (index != 0)
172 ReMap(oldMap, Map.Data());
173 return index;
174 }
175 /*}}}*/
// CacheGenerator::MergeList - Merge the package list			/*{{{*/
// ---------------------------------------------------------------------
/* This provides the generation of the entries in the cache. Each loop
   goes through a single package record from the underlying parse engine.
   If OutVer is non-NULL only a single record is merged and the iterator
   of its version is returned through it. */
bool pkgCacheGenerator::MergeList(ListParser &List,
				  pkgCache::VerIterator *OutVer)
{
   List.Owner = this;

   unsigned int Counter = 0;
   while (List.Step() == true)
   {
      string const PackageName = List.Package();
      if (PackageName.empty() == true)
	 return false;

      Counter++;
      if (Counter % 100 == 0 && Progress != 0)
	 Progress->Progress(List.Offset());

      string Arch = List.Architecture();
      string const Version = List.Version();
      // a stanza with neither version nor architecture carries only
      // description data (e.g. from Translation- files)
      if (Version.empty() == true && Arch.empty() == true)
      {
	 // package descriptions
	 if (MergeListGroup(List, PackageName) == false)
	    return false;
	 continue;
      }

      if (Arch.empty() == true)
      {
	 // use the pseudo arch 'none' for arch-less packages
	 Arch = "none";
	 /* We might built a SingleArchCache here, which we don't want to blow up
	    just for these :none packages to a proper MultiArchCache, so just ensure
	    that we have always a native package structure first for SingleArch */
	 pkgCache::PkgIterator NP;
	 Dynamic<pkgCache::PkgIterator> DynPkg(NP);
	 if (NewPackage(NP, PackageName, _config->Find("APT::Architecture")) == false)
	    // TRANSLATOR: The first placeholder is a package name,
	    // the other two should be copied verbatim as they include debug info
	    return _error->Error(_("Error occurred while processing %s (%s%d)"),
				 PackageName.c_str(), "NewPackage", 0);
      }

      // Get a pointer to the package structure
      pkgCache::PkgIterator Pkg;
      Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
      if (NewPackage(Pkg, PackageName, Arch) == false)
	 // TRANSLATOR: The first placeholder is a package name,
	 // the other two should be copied verbatim as they include debug info
	 return _error->Error(_("Error occurred while processing %s (%s%d)"),
			      PackageName.c_str(), "NewPackage", 1);


      // version-less stanzas only amend the existing package/description
      if (Version.empty() == true)
      {
	 if (MergeListPackage(List, Pkg) == false)
	    return false;
      }
      else
      {
	 if (MergeListVersion(List, Pkg, Version, OutVer) == false)
	    return false;
      }

      // single-record mode: stop after the first merged stanza
      if (OutVer != 0)
      {
	 FoundFileDeps |= List.HasFileDeps();
	 return true;
      }
   }

   // the ID fields are fixed-width - refuse to build a cache that would
   // overflow them and silently alias IDs
   if (Cache.HeaderP->PackageCount >= (1ULL<<sizeof(Cache.PkgP->ID)*8)-1)
      return _error->Error(_("Wow, you exceeded the number of package "
			     "names this APT is capable of."));
   if (Cache.HeaderP->VersionCount >= (1ULL<<(sizeof(Cache.VerP->ID)*8))-1)
      return _error->Error(_("Wow, you exceeded the number of versions "
			     "this APT is capable of."));
   if (Cache.HeaderP->DescriptionCount >= (1ULL<<(sizeof(Cache.DescP->ID)*8))-1)
      return _error->Error(_("Wow, you exceeded the number of descriptions "
			     "this APT is capable of."));
   if (Cache.HeaderP->DependsCount >= (1ULL<<(sizeof(Cache.DepP->ID)*8))-1ULL)
      return _error->Error(_("Wow, you exceeded the number of dependencies "
			     "this APT is capable of."));

   FoundFileDeps |= List.HasFileDeps();
   return true;
}
266 // CacheGenerator::MergeListGroup /*{{{*/
267 bool pkgCacheGenerator::MergeListGroup(ListParser &List, std::string const &GrpName)
268 {
269 pkgCache::GrpIterator Grp = Cache.FindGrp(GrpName);
270 // a group has no data on it's own, only packages have it but these
271 // stanzas like this come from Translation- files to add descriptions,
272 // but without a version we don't need a description for it…
273 if (Grp.end() == true)
274 return true;
275 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
276
277 pkgCache::PkgIterator Pkg;
278 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
279 for (Pkg = Grp.PackageList(); Pkg.end() == false; Pkg = Grp.NextPkg(Pkg))
280 if (MergeListPackage(List, Pkg) == false)
281 return false;
282
283 return true;
284 }
285 /*}}}*/
// CacheGenerator::MergeListPackage					/*{{{*/
// Merge a version-less stanza: let the parser update the package record
// and attach the stanza's description to every version whose description
// MD5 matches (unless that md5/language pair is already recorded).
bool pkgCacheGenerator::MergeListPackage(ListParser &List, pkgCache::PkgIterator &Pkg)
{
   // we first process the package, then the descriptions
   // (for deb this package processing is in fact a no-op)
   pkgCache::VerIterator Ver(Cache);
   Dynamic<pkgCache::VerIterator> DynVer(Ver);
   if (List.UsePackage(Pkg, Ver) == false)
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "UsePackage", 1);

   // Find the right version to write the description
   MD5SumValue CurMd5 = List.Description_md5();
   std::string CurLang = List.DescriptionLanguage();

   for (Ver = Pkg.VersionList(); Ver.end() == false; ++Ver)
   {
      pkgCache::DescIterator VerDesc = Ver.DescriptionList();

      // a version can only have one md5 describing it
      if (VerDesc.end() == true || MD5SumValue(VerDesc.md5()) != CurMd5)
	 continue;

      // don't add a new description if we have one for the given
      // md5 && language
      if (IsDuplicateDescription(VerDesc, CurMd5, CurLang) == true)
	 continue;

      pkgCache::DescIterator Desc;
      Dynamic<pkgCache::DescIterator> DynDesc(Desc);

      // reuse the existing md5 string rather than writing it again
      map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, VerDesc->md5sum);
      if (unlikely(descindex == 0 && _error->PendingError()))
	 return _error->Error(_("Error occurred while processing %s (%s%d)"),
			      Pkg.Name(), "NewDescription", 1);

      Desc->ParentPkg = Pkg.Index();

      // we add at the end, so that the start is constant as we need
      // that to be able to efficiently share these lists
      VerDesc = Ver.DescriptionList(); // old value might be invalid after ReMap
      for (;VerDesc.end() == false && VerDesc->NextDesc != 0; ++VerDesc);
      map_ptrloc * const LastNextDesc = (VerDesc.end() == true) ? &Ver->DescriptionList : &VerDesc->NextDesc;
      *LastNextDesc = descindex;

      if (NewFileDesc(Desc,List) == false)
	 return _error->Error(_("Error occurred while processing %s (%s%d)"),
			      Pkg.Name(), "NewFileDesc", 1);

      // we can stop here as all "same" versions will share the description
      break;
   }

   return true;
}
									/*}}}*/
// CacheGenerator::MergeListVersion					/*{{{*/
// Merge a full stanza: find or insert the version record (keeping the
// version list sorted), create the implicit Multi-Arch dependencies and
// attach the (untranslated) description, sharing one if it already exists.
bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator &Pkg,
					 std::string const &Version, pkgCache::VerIterator* &OutVer)
{
   pkgCache::VerIterator Ver = Pkg.VersionList();
   Dynamic<pkgCache::VerIterator> DynVer(Ver);
   // insertion point: the NextVer slot the new version gets linked into
   map_ptrloc *LastVer = &Pkg->VersionList;
   void const * oldMap = Map.Data();

   unsigned long const Hash = List.VersionHash();
   if (Ver.end() == false)
   {
      /* We know the list is sorted so we use that fact in the search.
         Insertion of new versions is done with correct sorting */
      int Res = 1;
      for (; Ver.end() == false; LastVer = &Ver->NextVer, Ver++)
      {
	 Res = Cache.VS->CmpVersion(Version,Ver.VerStr());
	 // Version is higher as current version - insert here
	 if (Res > 0)
	    break;
	 // Versionstrings are equal - is hash also equal?
	 if (Res == 0 && Ver->Hash == Hash)
	    break;
	 // proceed with the next till we have either the right
	 // or we found another version (which will be lower)
      }

      /* We already have a version for this item, record that we saw it */
      if (Res == 0 && Ver.end() == false && Ver->Hash == Hash)
      {
	 if (List.UsePackage(Pkg,Ver) == false)
	    return _error->Error(_("Error occurred while processing %s (%s%d)"),
				 Pkg.Name(), "UsePackage", 2);

	 if (NewFileVer(Ver,List) == false)
	    return _error->Error(_("Error occurred while processing %s (%s%d)"),
				 Pkg.Name(), "NewFileVer", 1);

	 // Read only a single record and return
	 if (OutVer != 0)
	 {
	    *OutVer = Ver;
	    return true;
	 }

	 return true;
      }
   }

   // Add a new version
   map_ptrloc const verindex = NewVersion(Ver,Version,*LastVer);
   if (verindex == 0 && _error->PendingError())
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewVersion", 1);

   // NewVersion may have grown+moved the map; rebase the insertion slot
   if (oldMap != Map.Data())
	 LastVer += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
   *LastVer = verindex;
   Ver->ParentPkg = Pkg.Index();
   Ver->Hash = Hash;

   if (unlikely(List.NewVersion(Ver) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewVersion", 2);

   if (unlikely(List.UsePackage(Pkg,Ver) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "UsePackage", 3);

   if (unlikely(NewFileVer(Ver,List) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewFileVer", 2);

   pkgCache::GrpIterator Grp = Pkg.Group();
   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);

   /* If it is the first version of this package we need to add implicit
      Multi-Arch dependencies to all other package versions in the group now -
      otherwise we just add them for this new version */
   if (Pkg.VersionList()->NextVer == 0)
   {
      pkgCache::PkgIterator P = Grp.PackageList();
      Dynamic<pkgCache::PkgIterator> DynP(P);
      for (; P.end() != true; P = Grp.NextPkg(P))
      {
	 if (P->ID == Pkg->ID)
	    continue;
	 pkgCache::VerIterator V = P.VersionList();
	 Dynamic<pkgCache::VerIterator> DynV(V);
	 for (; V.end() != true; ++V)
	    if (unlikely(AddImplicitDepends(V, Pkg) == false))
	       return _error->Error(_("Error occurred while processing %s (%s%d)"),
				    Pkg.Name(), "AddImplicitDepends", 1);
      }
      /* :none packages are packages without an architecture. They are forbidden by
	 debian-policy, so usually they will only be in (old) dpkg status files -
	 and dpkg will complain about them - and are pretty rare. We therefore do
	 usually not create conflicts while the parent is created, but only if a :none
	 package (= the target) appears. This creates incorrect dependencies on :none
	 for architecture-specific dependencies on the package we copy from, but we
	 will ignore this bug as architecture-specific dependencies are only allowed
	 in jessie and until then the :none packages should be extinct (hopefully).
	 In other words: This should work long enough to allow graceful removal of
	 these packages, it is not supposed to allow users to keep using them … */
      if (strcmp(Pkg.Arch(), "none") == 0)
      {
	 pkgCache::PkgIterator M = Grp.FindPreferredPkg();
	 if (M.end() == false && Pkg != M)
	 {
	    pkgCache::DepIterator D = M.RevDependsList();
	    Dynamic<pkgCache::DepIterator> DynD(D);
	    for (; D.end() == false; ++D)
	    {
	       if ((D->Type != pkgCache::Dep::Conflicts &&
		    D->Type != pkgCache::Dep::DpkgBreaks &&
		    D->Type != pkgCache::Dep::Replaces) ||
		   D.ParentPkg().Group() == Grp)
		  continue;

	       map_ptrloc *OldDepLast = NULL;
	       pkgCache::VerIterator ConVersion = D.ParentVer();
	       Dynamic<pkgCache::VerIterator> DynV(ConVersion);
	       // duplicate the Conflicts/Breaks/Replaces for :none arch
	       NewDepends(Pkg, ConVersion, D->Version,
			  D->CompareOp, D->Type, OldDepLast);
	    }
	 }
      }
   }
   if (unlikely(AddImplicitDepends(Grp, Pkg, Ver) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "AddImplicitDepends", 2);

   // Read only a single record and return
   if (OutVer != 0)
   {
      *OutVer = Ver;
      return true;
   }

   /* Record the Description (it is not translated) */
   MD5SumValue CurMd5 = List.Description_md5();
   if (CurMd5.Value().empty() == true)
      return true;
   std::string CurLang = List.DescriptionLanguage();

   /* Before we add a new description we first search in the group for
      a version with a description of the same MD5 - if so we reuse this
      description group instead of creating our own for this version */
   for (pkgCache::PkgIterator P = Grp.PackageList();
	P.end() == false; P = Grp.NextPkg(P))
   {
      for (pkgCache::VerIterator V = P.VersionList();
	   V.end() == false; ++V)
      {
	 if (IsDuplicateDescription(V.DescriptionList(), CurMd5, "") == false)
	    continue;
	 Ver->DescriptionList = V->DescriptionList;
	 return true;
      }
   }

   // We haven't found reusable descriptions, so add the first description
   pkgCache::DescIterator Desc = Ver.DescriptionList();
   Dynamic<pkgCache::DescIterator> DynDesc(Desc);

   map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, 0);
   if (unlikely(descindex == 0 && _error->PendingError()))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewDescription", 2);

   Desc->ParentPkg = Pkg.Index();
   Ver->DescriptionList = descindex;

   if (NewFileDesc(Desc,List) == false)
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewFileDesc", 2);

   return true;
}
									/*}}}*/
									/*}}}*/
525 // CacheGenerator::MergeFileProvides - Merge file provides /*{{{*/
526 // ---------------------------------------------------------------------
527 /* If we found any file depends while parsing the main list we need to
528 resolve them. Since it is undesired to load the entire list of files
529 into the cache as virtual packages we do a two stage effort. MergeList
530 identifies the file depends and this creates Provdies for them by
531 re-parsing all the indexs. */
532 bool pkgCacheGenerator::MergeFileProvides(ListParser &List)
533 {
534 List.Owner = this;
535
536 unsigned int Counter = 0;
537 while (List.Step() == true)
538 {
539 string PackageName = List.Package();
540 if (PackageName.empty() == true)
541 return false;
542 string Version = List.Version();
543 if (Version.empty() == true)
544 continue;
545
546 pkgCache::PkgIterator Pkg = Cache.FindPkg(PackageName);
547 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
548 if (Pkg.end() == true)
549 return _error->Error(_("Error occurred while processing %s (%s%d)"),
550 PackageName.c_str(), "FindPkg", 1);
551 Counter++;
552 if (Counter % 100 == 0 && Progress != 0)
553 Progress->Progress(List.Offset());
554
555 unsigned long Hash = List.VersionHash();
556 pkgCache::VerIterator Ver = Pkg.VersionList();
557 Dynamic<pkgCache::VerIterator> DynVer(Ver);
558 for (; Ver.end() == false; ++Ver)
559 {
560 if (Ver->Hash == Hash && Version.c_str() == Ver.VerStr())
561 {
562 if (List.CollectFileProvides(Cache,Ver) == false)
563 return _error->Error(_("Error occurred while processing %s (%s%d)"),
564 PackageName.c_str(), "CollectFileProvides", 1);
565 break;
566 }
567 }
568
569 if (Ver.end() == true)
570 _error->Warning(_("Package %s %s was not found while processing file dependencies"),PackageName.c_str(),Version.c_str());
571 }
572
573 return true;
574 }
575 /*}}}*/
576 // CacheGenerator::NewGroup - Add a new group /*{{{*/
577 // ---------------------------------------------------------------------
578 /* This creates a new group structure and adds it to the hash table */
579 bool pkgCacheGenerator::NewGroup(pkgCache::GrpIterator &Grp, const string &Name)
580 {
581 Grp = Cache.FindGrp(Name);
582 if (Grp.end() == false)
583 return true;
584
585 // Get a structure
586 map_ptrloc const Group = AllocateInMap(sizeof(pkgCache::Group));
587 if (unlikely(Group == 0))
588 return false;
589
590 Grp = pkgCache::GrpIterator(Cache, Cache.GrpP + Group);
591 map_ptrloc const idxName = WriteStringInMap(Name);
592 if (unlikely(idxName == 0))
593 return false;
594 Grp->Name = idxName;
595
596 // Insert it into the hash table
597 unsigned long const Hash = Cache.Hash(Name);
598 Grp->Next = Cache.HeaderP->GrpHashTable[Hash];
599 Cache.HeaderP->GrpHashTable[Hash] = Group;
600
601 Grp->ID = Cache.HeaderP->GroupCount++;
602 return true;
603 }
604 /*}}}*/
// CacheGenerator::NewPackage - Add a new package			/*{{{*/
// ---------------------------------------------------------------------
/* This creates a new package structure and adds it to the hash table.
   The package is placed in the group for Name (created on demand) and
   appended to that group's package chain. */
bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name,
					const string &Arch) {
   pkgCache::GrpIterator Grp;
   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
   if (unlikely(NewGroup(Grp, Name) == false))
      return false;

   // reuse an already existing package of this name+arch
   Pkg = Grp.FindPkg(Arch);
      if (Pkg.end() == false)
	 return true;

   // Get a structure
   map_ptrloc const Package = AllocateInMap(sizeof(pkgCache::Package));
   if (unlikely(Package == 0))
      return false;
   Pkg = pkgCache::PkgIterator(Cache,Cache.PkgP + Package);

   // Insert the package into our package list
   if (Grp->FirstPackage == 0) // the group is new
   {
      // Insert it into the hash table
      unsigned long const Hash = Cache.Hash(Name);
      Pkg->NextPackage = Cache.HeaderP->PkgHashTable[Hash];
      Cache.HeaderP->PkgHashTable[Hash] = Package;
      Grp->FirstPackage = Package;
   }
   else // Group the Packages together
   {
      // this package is the new last package
      pkgCache::PkgIterator LastPkg(Cache, Cache.PkgP + Grp->LastPackage);
      Pkg->NextPackage = LastPkg->NextPackage;
      LastPkg->NextPackage = Package;
   }
   Grp->LastPackage = Package;

   // Set the name, arch and the ID
   Pkg->Name = Grp->Name;
   Pkg->Group = Grp.Index();
   // all is mapped to the native architecture
   map_ptrloc const idxArch = (Arch == "all") ? Cache.HeaderP->Architecture : WriteUniqString(Arch.c_str());
   if (unlikely(idxArch == 0))
      return false;
   Pkg->Arch = idxArch;
   Pkg->ID = Cache.HeaderP->PackageCount++;

   return true;
}
									/*}}}*/
// CacheGenerator::AddImplicitDepends					/*{{{*/
// Add the implicit Multi-Arch dependencies from version V (of package P)
// onto every other architecture-sibling in group G.
bool pkgCacheGenerator::AddImplicitDepends(pkgCache::GrpIterator &G,
					   pkgCache::PkgIterator &P,
					   pkgCache::VerIterator &V)
{
   // copy P.Arch() into a string here as a cache remap
   // in NewDepends() later may alter the pointer location
   string Arch = P.Arch() == NULL ? "" : P.Arch();
   map_ptrloc *OldDepLast = NULL;
   /* MultiArch handling introduces a lot of implicit Dependencies:
      - MultiArch: same → Co-Installable if they have the same version
      - All others conflict with all other group members */
   bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
   pkgCache::PkgIterator D = G.PackageList();
   Dynamic<pkgCache::PkgIterator> DynD(D);
   // pass the string offset so NewDepends doesn't rewrite the version string
   map_ptrloc const VerStrIdx = V->VerStr;
   for (; D.end() != true; D = G.NextPkg(D))
   {
      // skip ourselves and siblings without any versions
      if (Arch == D.Arch() || D->VersionList == 0)
	 continue;
      /* We allow only one installed arch at the time
	 per group, therefore each group member conflicts
	 with all other group members */
      if (coInstall == true)
      {
	 // Replaces: ${self}:other ( << ${binary:Version})
	 NewDepends(D, V, VerStrIdx,
		    pkgCache::Dep::Less, pkgCache::Dep::Replaces,
		    OldDepLast);
	 // Breaks: ${self}:other (!= ${binary:Version})
	 NewDepends(D, V, VerStrIdx,
		    pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
		    OldDepLast);
      } else {
	 // Conflicts: ${self}:other
	 NewDepends(D, V, 0,
		    pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
		    OldDepLast);
      }
   }
   return true;
}
698 bool pkgCacheGenerator::AddImplicitDepends(pkgCache::VerIterator &V,
699 pkgCache::PkgIterator &D)
700 {
701 /* MultiArch handling introduces a lot of implicit Dependencies:
702 - MultiArch: same → Co-Installable if they have the same version
703 - All others conflict with all other group members */
704 map_ptrloc *OldDepLast = NULL;
705 bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
706 if (coInstall == true)
707 {
708 map_ptrloc const VerStrIdx = V->VerStr;
709 // Replaces: ${self}:other ( << ${binary:Version})
710 NewDepends(D, V, VerStrIdx,
711 pkgCache::Dep::Less, pkgCache::Dep::Replaces,
712 OldDepLast);
713 // Breaks: ${self}:other (!= ${binary:Version})
714 NewDepends(D, V, VerStrIdx,
715 pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
716 OldDepLast);
717 } else {
718 // Conflicts: ${self}:other
719 NewDepends(D, V, 0,
720 pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
721 OldDepLast);
722 }
723 return true;
724 }
725
726 /*}}}*/
727 // CacheGenerator::NewFileVer - Create a new File<->Version association /*{{{*/
728 // ---------------------------------------------------------------------
729 /* */
730 bool pkgCacheGenerator::NewFileVer(pkgCache::VerIterator &Ver,
731 ListParser &List)
732 {
733 if (CurrentFile == 0)
734 return true;
735
736 // Get a structure
737 map_ptrloc const VerFile = AllocateInMap(sizeof(pkgCache::VerFile));
738 if (VerFile == 0)
739 return 0;
740
741 pkgCache::VerFileIterator VF(Cache,Cache.VerFileP + VerFile);
742 VF->File = CurrentFile - Cache.PkgFileP;
743
744 // Link it to the end of the list
745 map_ptrloc *Last = &Ver->FileList;
746 for (pkgCache::VerFileIterator V = Ver.FileList(); V.end() == false; ++V)
747 Last = &V->NextFile;
748 VF->NextFile = *Last;
749 *Last = VF.Index();
750
751 VF->Offset = List.Offset();
752 VF->Size = List.Size();
753 if (Cache.HeaderP->MaxVerFileSize < VF->Size)
754 Cache.HeaderP->MaxVerFileSize = VF->Size;
755 Cache.HeaderP->VerFileCount++;
756
757 return true;
758 }
759 /*}}}*/
760 // CacheGenerator::NewVersion - Create a new Version /*{{{*/
761 // ---------------------------------------------------------------------
762 /* This puts a version structure in the linked list */
763 unsigned long pkgCacheGenerator::NewVersion(pkgCache::VerIterator &Ver,
764 const string &VerStr,
765 unsigned long Next)
766 {
767 // Get a structure
768 map_ptrloc const Version = AllocateInMap(sizeof(pkgCache::Version));
769 if (Version == 0)
770 return 0;
771
772 // Fill it in
773 Ver = pkgCache::VerIterator(Cache,Cache.VerP + Version);
774 //Dynamic<pkgCache::VerIterator> DynV(Ver); // caller MergeListVersion already takes care of it
775 Ver->NextVer = Next;
776 Ver->ID = Cache.HeaderP->VersionCount++;
777 map_ptrloc const idxVerStr = WriteStringInMap(VerStr);
778 if (unlikely(idxVerStr == 0))
779 return 0;
780 Ver->VerStr = idxVerStr;
781
782 return Version;
783 }
784 /*}}}*/
785 // CacheGenerator::NewFileDesc - Create a new File<->Desc association /*{{{*/
786 // ---------------------------------------------------------------------
787 /* */
788 bool pkgCacheGenerator::NewFileDesc(pkgCache::DescIterator &Desc,
789 ListParser &List)
790 {
791 if (CurrentFile == 0)
792 return true;
793
794 // Get a structure
795 map_ptrloc const DescFile = AllocateInMap(sizeof(pkgCache::DescFile));
796 if (DescFile == 0)
797 return false;
798
799 pkgCache::DescFileIterator DF(Cache,Cache.DescFileP + DescFile);
800 DF->File = CurrentFile - Cache.PkgFileP;
801
802 // Link it to the end of the list
803 map_ptrloc *Last = &Desc->FileList;
804 for (pkgCache::DescFileIterator D = Desc.FileList(); D.end() == false; ++D)
805 Last = &D->NextFile;
806
807 DF->NextFile = *Last;
808 *Last = DF.Index();
809
810 DF->Offset = List.Offset();
811 DF->Size = List.Size();
812 if (Cache.HeaderP->MaxDescFileSize < DF->Size)
813 Cache.HeaderP->MaxDescFileSize = DF->Size;
814 Cache.HeaderP->DescFileCount++;
815
816 return true;
817 }
818 /*}}}*/
819 // CacheGenerator::NewDescription - Create a new Description /*{{{*/
820 // ---------------------------------------------------------------------
821 /* This puts a description structure in the linked list */
822 map_ptrloc pkgCacheGenerator::NewDescription(pkgCache::DescIterator &Desc,
823 const string &Lang,
824 const MD5SumValue &md5sum,
825 map_ptrloc idxmd5str)
826 {
827 // Get a structure
828 map_ptrloc const Description = AllocateInMap(sizeof(pkgCache::Description));
829 if (Description == 0)
830 return 0;
831
832 // Fill it in
833 Desc = pkgCache::DescIterator(Cache,Cache.DescP + Description);
834 Desc->ID = Cache.HeaderP->DescriptionCount++;
835 map_ptrloc const idxlanguage_code = WriteUniqString(Lang);
836 if (unlikely(idxlanguage_code == 0))
837 return 0;
838 Desc->language_code = idxlanguage_code;
839
840 if (idxmd5str != 0)
841 Desc->md5sum = idxmd5str;
842 else
843 {
844 map_ptrloc const idxmd5sum = WriteStringInMap(md5sum.Value());
845 if (unlikely(idxmd5sum == 0))
846 return 0;
847 Desc->md5sum = idxmd5sum;
848 }
849
850 return Description;
851 }
852 /*}}}*/
853 // CacheGenerator::NewDepends - Create a dependency element /*{{{*/
854 // ---------------------------------------------------------------------
855 /* This creates a dependency element in the tree. It is linked to the
856 version and to the package that it is pointing to. */
857 bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
858 pkgCache::VerIterator &Ver,
859 string const &Version,
860 unsigned int const &Op,
861 unsigned int const &Type,
862 map_ptrloc* &OldDepLast)
863 {
864 map_ptrloc index = 0;
865 if (Version.empty() == false)
866 {
867 void const * const oldMap = Map.Data();
868 index = WriteStringInMap(Version);
869 if (unlikely(index == 0))
870 return false;
871 if (oldMap != Map.Data())
872 OldDepLast += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
873 }
874 return NewDepends(Pkg, Ver, index, Op, Type, OldDepLast);
875 }
// Offset-based overload: Version is already a map offset (0 = no version).
// Allocates the dependency record, links it into the package's reverse
// depends list and appends it to the version's depends list, caching the
// tail slot in OldDepLast for the next call.
bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
				   pkgCache::VerIterator &Ver,
				   map_ptrloc const Version,
				   unsigned int const &Op,
				   unsigned int const &Type,
				   map_ptrloc* &OldDepLast)
{
   void const * const oldMap = Map.Data();
   // Get a structure
   map_ptrloc const Dependency = AllocateInMap(sizeof(pkgCache::Dependency));
   if (unlikely(Dependency == 0))
      return false;

   // Fill it in
   pkgCache::DepIterator Dep(Cache,Cache.DepP + Dependency);
   Dynamic<pkgCache::DepIterator> DynDep(Dep);
   Dep->ParentVer = Ver.Index();
   Dep->Type = Type;
   Dep->CompareOp = Op;
   Dep->Version = Version;
   Dep->ID = Cache.HeaderP->DependsCount++;

   // Link it to the package
   Dep->Package = Pkg.Index();
   Dep->NextRevDepends = Pkg->RevDepends;
   Pkg->RevDepends = Dep.Index();

   // Do we know where to link the Dependency to?
   if (OldDepLast == NULL)
   {
      // first dependency of this version: walk to the list tail once
      OldDepLast = &Ver->DependsList;
      for (pkgCache::DepIterator D = Ver.DependsList(); D.end() == false; ++D)
	 OldDepLast = &D->NextDepends;
   } else if (oldMap != Map.Data())
      // the allocation above moved the map: rebase the cached tail slot
      OldDepLast += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;

   Dep->NextDepends = *OldDepLast;
   *OldDepLast = Dep.Index();
   OldDepLast = &Dep->NextDepends;

   return true;
}
									/*}}}*/
// ListParser::NewDepends - Create the environment for a new dependency	/*{{{*/
// ---------------------------------------------------------------------
/* This creates a Group and the Package to link this dependency to if
   needed and handles also the caching of the old endpoint */
bool pkgCacheGenerator::ListParser::NewDepends(pkgCache::VerIterator &Ver,
					       const string &PackageName,
					       const string &Arch,
					       const string &Version,
					       unsigned int Op,
					       unsigned int Type)
{
   pkgCache::GrpIterator Grp;
   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
   if (unlikely(Owner->NewGroup(Grp, PackageName) == false))
      return false;

   // Locate the target package
   pkgCache::PkgIterator Pkg = Grp.FindPkg(Arch);
   /* We don't create 'none' packages and their dependencies if we can
      avoid it: a dependency on a 'none' arch target is only materialized
      when the owning version itself belongs to a 'none' package. */
   if (Pkg.end() == true && Arch == "none" && strcmp(Ver.ParentPkg().Arch(), "none") != 0)
      return true;
   Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
   if (Pkg.end() == true) {
      if (unlikely(Owner->NewPackage(Pkg, PackageName, Arch) == false))
	 return false;
   }

   // Is it a file dependency? (dpkg-style "Provides: /path" targets)
   if (unlikely(PackageName[0] == '/'))
      FoundFileDeps = true;

   /* Caching the old end point speeds up generation substantially:
      consecutive dependencies of the same version append directly
      instead of re-walking the whole dependency list every time. */
   if (OldDepVer != Ver) {
      OldDepLast = NULL;
      OldDepVer = Ver;
   }

   return Owner->NewDepends(Pkg, Ver, Version, Op, Type, OldDepLast);
}
									/*}}}*/
959 // ListParser::NewProvides - Create a Provides element /*{{{*/
960 // ---------------------------------------------------------------------
961 /* */
962 bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator &Ver,
963 const string &PkgName,
964 const string &PkgArch,
965 const string &Version)
966 {
967 pkgCache &Cache = Owner->Cache;
968
969 // We do not add self referencing provides
970 if (Ver.ParentPkg().Name() == PkgName && (PkgArch == Ver.ParentPkg().Arch() ||
971 (PkgArch == "all" && strcmp((Cache.StrP + Cache.HeaderP->Architecture), Ver.ParentPkg().Arch()) == 0)))
972 return true;
973
974 // Get a structure
975 map_ptrloc const Provides = Owner->AllocateInMap(sizeof(pkgCache::Provides));
976 if (unlikely(Provides == 0))
977 return false;
978 Cache.HeaderP->ProvidesCount++;
979
980 // Fill it in
981 pkgCache::PrvIterator Prv(Cache,Cache.ProvideP + Provides,Cache.PkgP);
982 Dynamic<pkgCache::PrvIterator> DynPrv(Prv);
983 Prv->Version = Ver.Index();
984 Prv->NextPkgProv = Ver->ProvidesList;
985 Ver->ProvidesList = Prv.Index();
986 if (Version.empty() == false) {
987 map_ptrloc const idxProvideVersion = WriteString(Version);
988 Prv->ProvideVersion = idxProvideVersion;
989 if (unlikely(idxProvideVersion == 0))
990 return false;
991 }
992
993 // Locate the target package
994 pkgCache::PkgIterator Pkg;
995 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
996 if (unlikely(Owner->NewPackage(Pkg,PkgName, PkgArch) == false))
997 return false;
998
999 // Link it to the package
1000 Prv->ParentPkg = Pkg.Index();
1001 Prv->NextProvides = Pkg->ProvidesList;
1002 Pkg->ProvidesList = Prv.Index();
1003
1004 return true;
1005 }
1006 /*}}}*/
// CacheGenerator::SelectFile - Select the current file being parsed	/*{{{*/
// ---------------------------------------------------------------------
/* This is used to select which file is to be associated with all newly
   added versions. The caller is responsible for setting the IMS fields. */
bool pkgCacheGenerator::SelectFile(const string &File,const string &Site,
				   const pkgIndexFile &Index,
				   unsigned long Flags)
{
   // Get some space for the structure
   map_ptrloc const idxFile = AllocateInMap(sizeof(*CurrentFile));
   if (unlikely(idxFile == 0))
      return false;
   CurrentFile = Cache.PkgFileP + idxFile;

   // Fill it in
   // NOTE(review): the Write* calls below may grow and move the map;
   // CurrentFile is presumably relocated by the generator's remap logic —
   // confirm before relying on it across these calls.
   map_ptrloc const idxFileName = WriteStringInMap(File);
   map_ptrloc const idxSite = WriteUniqString(Site);
   if (unlikely(idxFileName == 0 || idxSite == 0))
      return false;
   CurrentFile->FileName = idxFileName;
   CurrentFile->Site = idxSite;
   CurrentFile->NextFile = Cache.HeaderP->FileList;
   CurrentFile->Flags = Flags;
   CurrentFile->ID = Cache.HeaderP->PackageFileCount;
   map_ptrloc const idxIndexType = WriteUniqString(Index.GetType()->Label);
   if (unlikely(idxIndexType == 0))
      return false;
   CurrentFile->IndexType = idxIndexType;
   PkgFileName = File;
   // Push the new file onto the head of the global file list.
   Cache.HeaderP->FileList = CurrentFile - Cache.PkgFileP;
   Cache.HeaderP->PackageFileCount++;

   if (Progress != 0)
      Progress->SubProgress(Index.Size());
   return true;
}
									/*}}}*/
// CacheGenerator::WriteUniqueString - Insert a unique string		/*{{{*/
// ---------------------------------------------------------------------
/* This is used to create handles to strings. Given the same text it
   always returns the same number. The string is stored once in the map
   and tracked in a sorted linked list of StringItems; a tiny transient
   hash table short-circuits repeated lookups of the same string. */
unsigned long pkgCacheGenerator::WriteUniqString(const char *S,
						 unsigned int Size)
{
   /* We use a very small transient hash table here, this speeds up generation
      by a fair amount on slower machines */
   // NOTE(review): the hash reads S[0] and S[1] — assumes the buffer has at
   // least two readable bytes; confirm callers never pass shorter input.
   pkgCache::StringItem *&Bucket = UniqHash[(S[0]*5 + S[1]) % _count(UniqHash)];
   if (Bucket != 0 &&
       stringcmp(S,S+Size,Cache.StrP + Bucket->String) == 0)
      return Bucket->String;

   // Search for an insertion point in the sorted item list.
   pkgCache::StringItem *I = Cache.StringItemP + Cache.HeaderP->StringList;
   int Res = 1;
   map_ptrloc *Last = &Cache.HeaderP->StringList;
   for (; I != Cache.StringItemP; Last = &I->NextItem,
        I = Cache.StringItemP + I->NextItem)
   {
      Res = stringcmp(S,S+Size,Cache.StrP + I->String);
      if (Res >= 0)
	 break;
   }

   // Match: remember it in the hash bucket and reuse the stored string.
   if (Res == 0)
   {
      Bucket = I;
      return I->String;
   }

   // Get a structure
   void const * const oldMap = Map.Data();
   map_ptrloc const Item = AllocateInMap(sizeof(pkgCache::StringItem));
   if (Item == 0)
      return 0;

   map_ptrloc const idxString = WriteStringInMap(S,Size);
   if (unlikely(idxString == 0))
      return 0;
   /* Allocation/write may have moved the map: relocate the raw pointers
      we still hold into it by the distance the base moved. */
   if (oldMap != Map.Data()) {
      Last += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
      I += (pkgCache::StringItem*) Map.Data() - (pkgCache::StringItem*) oldMap;
   }
   *Last = Item;

   // Fill in the structure and splice it into the list before I.
   pkgCache::StringItem *ItemP = Cache.StringItemP + Item;
   ItemP->NextItem = I - Cache.StringItemP;
   ItemP->String = idxString;

   Bucket = ItemP;
   return ItemP->String;
}
									/*}}}*/
1101 // CheckValidity - Check that a cache is up-to-date /*{{{*/
1102 // ---------------------------------------------------------------------
1103 /* This just verifies that each file in the list of index files exists,
1104 has matching attributes with the cache and the cache does not have
1105 any extra files. */
1106 static bool CheckValidity(const string &CacheFile,
1107 pkgSourceList &List,
1108 FileIterator Start,
1109 FileIterator End,
1110 MMap **OutMap = 0)
1111 {
1112 bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
1113 // No file, certainly invalid
1114 if (CacheFile.empty() == true || FileExists(CacheFile) == false)
1115 {
1116 if (Debug == true)
1117 std::clog << "CacheFile doesn't exist" << std::endl;
1118 return false;
1119 }
1120
1121 if (List.GetLastModifiedTime() > GetModificationTime(CacheFile))
1122 {
1123 if (Debug == true)
1124 std::clog << "sources.list is newer than the cache" << std::endl;
1125 return false;
1126 }
1127
1128 // Map it
1129 FileFd CacheF(CacheFile,FileFd::ReadOnly);
1130 SPtr<MMap> Map = new MMap(CacheF,0);
1131 pkgCache Cache(Map);
1132 if (_error->PendingError() == true || Map->Size() == 0)
1133 {
1134 if (Debug == true)
1135 std::clog << "Errors are pending or Map is empty()" << std::endl;
1136 _error->Discard();
1137 return false;
1138 }
1139
1140 /* Now we check every index file, see if it is in the cache,
1141 verify the IMS data and check that it is on the disk too.. */
1142 SPtrArray<bool> Visited = new bool[Cache.HeaderP->PackageFileCount];
1143 memset(Visited,0,sizeof(*Visited)*Cache.HeaderP->PackageFileCount);
1144 for (; Start != End; ++Start)
1145 {
1146 if (Debug == true)
1147 std::clog << "Checking PkgFile " << (*Start)->Describe() << ": ";
1148 if ((*Start)->HasPackages() == false)
1149 {
1150 if (Debug == true)
1151 std::clog << "Has NO packages" << std::endl;
1152 continue;
1153 }
1154
1155 if ((*Start)->Exists() == false)
1156 {
1157 #if 0 // mvo: we no longer give a message here (Default Sources spec)
1158 _error->WarningE("stat",_("Couldn't stat source package list %s"),
1159 (*Start)->Describe().c_str());
1160 #endif
1161 if (Debug == true)
1162 std::clog << "file doesn't exist" << std::endl;
1163 continue;
1164 }
1165
1166 // FindInCache is also expected to do an IMS check.
1167 pkgCache::PkgFileIterator File = (*Start)->FindInCache(Cache);
1168 if (File.end() == true)
1169 {
1170 if (Debug == true)
1171 std::clog << "FindInCache returned end-Pointer" << std::endl;
1172 return false;
1173 }
1174
1175 Visited[File->ID] = true;
1176 if (Debug == true)
1177 std::clog << "with ID " << File->ID << " is valid" << std::endl;
1178 }
1179
1180 for (unsigned I = 0; I != Cache.HeaderP->PackageFileCount; I++)
1181 if (Visited[I] == false)
1182 {
1183 if (Debug == true)
1184 std::clog << "File with ID" << I << " wasn't visited" << std::endl;
1185 return false;
1186 }
1187
1188 if (_error->PendingError() == true)
1189 {
1190 if (Debug == true)
1191 {
1192 std::clog << "Validity failed because of pending errors:" << std::endl;
1193 _error->DumpErrors();
1194 }
1195 _error->Discard();
1196 return false;
1197 }
1198
1199 if (OutMap != 0)
1200 *OutMap = Map.UnGuard();
1201 return true;
1202 }
1203 /*}}}*/
1204 // ComputeSize - Compute the total size of a bunch of files /*{{{*/
1205 // ---------------------------------------------------------------------
1206 /* Size is kind of an abstract notion that is only used for the progress
1207 meter */
1208 static unsigned long ComputeSize(FileIterator Start,FileIterator End)
1209 {
1210 unsigned long TotalSize = 0;
1211 for (; Start != End; ++Start)
1212 {
1213 if ((*Start)->HasPackages() == false)
1214 continue;
1215 TotalSize += (*Start)->Size();
1216 }
1217 return TotalSize;
1218 }
1219 /*}}}*/
// BuildCache - Merge the list of index files into the cache		/*{{{*/
// ---------------------------------------------------------------------
/* Merges every index file in [Start,End) into the generator, skipping
   files without packages, files missing on disk and files already in
   the cache. CurrentSize/TotalSize feed the progress meter. If any
   merged file announced file dependencies a second pass collects the
   file provides. */
static bool BuildCache(pkgCacheGenerator &Gen,
		       OpProgress *Progress,
		       unsigned long &CurrentSize,unsigned long TotalSize,
		       FileIterator Start, FileIterator End)
{
   FileIterator I;
   for (I = Start; I != End; ++I)
   {
      if ((*I)->HasPackages() == false)
	 continue;

      if ((*I)->Exists() == false)
	 continue;

      // Already merged? Then the same sources.list entry appears twice.
      if ((*I)->FindInCache(Gen.GetCache()).end() == false)
      {
	 _error->Warning("Duplicate sources.list entry %s",
			 (*I)->Describe().c_str());
	 continue;
      }

      unsigned long Size = (*I)->Size();
      if (Progress != NULL)
	 Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Reading package lists"));
      CurrentSize += Size;

      if ((*I)->Merge(Gen,Progress) == false)
	 return false;
   }

   // Second pass: collect file provides found during the merge above.
   if (Gen.HasFileDeps() == true)
   {
      if (Progress != NULL)
	 Progress->Done();
      TotalSize = ComputeSize(Start, End);
      CurrentSize = 0;
      for (I = Start; I != End; ++I)
      {
	 unsigned long Size = (*I)->Size();
	 if (Progress != NULL)
	    Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Collecting File Provides"));
	 CurrentSize += Size;
	 if ((*I)->MergeFileProvides(Gen,Progress) == false)
	    return false;
      }
   }

   return true;
}
									/*}}}*/
1273 // CacheGenerator::CreateDynamicMMap - load an mmap with configuration options /*{{{*/
1274 DynamicMMap* pkgCacheGenerator::CreateDynamicMMap(FileFd *CacheF, unsigned long Flags) {
1275 unsigned long const MapStart = _config->FindI("APT::Cache-Start", 24*1024*1024);
1276 unsigned long const MapGrow = _config->FindI("APT::Cache-Grow", 1*1024*1024);
1277 unsigned long const MapLimit = _config->FindI("APT::Cache-Limit", 0);
1278 Flags |= MMap::Moveable;
1279 if (_config->FindB("APT::Cache-Fallback", false) == true)
1280 Flags |= MMap::Fallback;
1281 if (CacheF != NULL)
1282 return new DynamicMMap(*CacheF, Flags, MapStart, MapGrow, MapLimit);
1283 else
1284 return new DynamicMMap(Flags, MapStart, MapGrow, MapLimit);
1285 }
1286 /*}}}*/
// CacheGenerator::MakeStatusCache - Construct the status cache		/*{{{*/
// ---------------------------------------------------------------------
/* This makes sure that the status cache (the cache that has all
   index files from the sources list and all local ones) is ready
   to be mmaped. If OutMap is not zero then a MMap object representing
   the cache will be stored there. This is pretty much mandatory if you
   are using AllowMem. AllowMem lets the function be run as non-root
   where it builds the cache 'fast' into a memory buffer. */
// Deprecated free-function wrapper; forwards to the static member below.
__deprecated bool pkgMakeStatusCache(pkgSourceList &List,OpProgress &Progress,
			MMap **OutMap, bool AllowMem)
{ return pkgCacheGenerator::MakeStatusCache(List, &Progress, OutMap, AllowMem); }
bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress,
			MMap **OutMap,bool AllowMem)
{
   bool const Debug = _config->FindB("Debug::pkgCacheGen", false);

   // Collect every index file from every source in the list.
   std::vector<pkgIndexFile *> Files;
   for (std::vector<metaIndex *>::const_iterator i = List.begin();
        i != List.end();
        ++i)
   {
      std::vector <pkgIndexFile *> *Indexes = (*i)->GetIndexFiles();
      for (std::vector<pkgIndexFile *>::const_iterator j = Indexes->begin();
	   j != Indexes->end();
	   ++j)
         Files.push_back (*j);
   }

   // Everything before this index came from sources; after it come the
   // system status files.
   unsigned long const EndOfSource = Files.size();
   if (_system->AddStatusFiles(Files) == false)
      return false;

   // Decide if we can write to the files..
   string const CacheFile = _config->FindFile("Dir::Cache::pkgcache");
   string const SrcCacheFile = _config->FindFile("Dir::Cache::srcpkgcache");

   // ensure the cache directory exists
   if (CacheFile.empty() == false || SrcCacheFile.empty() == false)
   {
      string dir = _config->FindDir("Dir::Cache");
      size_t const len = dir.size();
      // Strip a trailing "apt/" so CreateDirectory gets the parent dir.
      if (len > 5 && dir.find("/apt/", len - 6, 5) == len - 5)
	 dir = dir.substr(0, len - 5);
      if (CacheFile.empty() == false)
	 CreateDirectory(dir, flNotFile(CacheFile));
      if (SrcCacheFile.empty() == false)
	 CreateDirectory(dir, flNotFile(SrcCacheFile));
   }

   // Decide if we can write to the cache
   bool Writeable = false;
   if (CacheFile.empty() == false)
      Writeable = access(flNotFile(CacheFile).c_str(),W_OK) == 0;
   else
      if (SrcCacheFile.empty() == false)
	 Writeable = access(flNotFile(SrcCacheFile).c_str(),W_OK) == 0;
   if (Debug == true)
      std::clog << "Do we have write-access to the cache files? " << (Writeable ? "YES" : "NO") << std::endl;

   if (Writeable == false && AllowMem == false && CacheFile.empty() == false)
      return _error->Error(_("Unable to write to %s"),flNotFile(CacheFile).c_str());

   if (Progress != NULL)
      Progress->OverallProgress(0,1,1,_("Reading package lists"));

   // Cache is OK, Fin.
   if (CheckValidity(CacheFile, List, Files.begin(),Files.end(),OutMap) == true)
   {
      if (Progress != NULL)
	 Progress->OverallProgress(1,1,1,_("Reading package lists"));
      if (Debug == true)
	 std::clog << "pkgcache.bin is valid - no need to build anything" << std::endl;
      return true;
   }
   else if (Debug == true)
	std::clog << "pkgcache.bin is NOT valid" << std::endl;

   /* At this point we know we need to reconstruct the package cache,
      begin. */
   SPtr<FileFd> CacheF;
   SPtr<DynamicMMap> Map;
   if (Writeable == true && CacheFile.empty() == false)
   {
      // Errors from opening the file-based map are collected on a private
      // error stack so a fallback to the in-memory map can discard them.
      _error->PushToStack();
      unlink(CacheFile.c_str());
      CacheF = new FileFd(CacheFile,FileFd::WriteAtomic);
      fchmod(CacheF->Fd(),0644);
      Map = CreateDynamicMMap(CacheF, MMap::Public);
      if (_error->PendingError() == true)
      {
	 delete CacheF.UnGuard();
	 delete Map.UnGuard();
	 if (Debug == true)
	    std::clog << "Open filebased MMap FAILED" << std::endl;
	 Writeable = false;
	 if (AllowMem == false)
	 {
	    _error->MergeWithStack();
	    return false;
	 }
	 _error->RevertToStack();
      }
      else
      {
	 _error->MergeWithStack();
	 if (Debug == true)
	    std::clog << "Open filebased MMap" << std::endl;
      }
   }
   if (Writeable == false || CacheFile.empty() == true)
   {
      // Just build it in memory..
      Map = CreateDynamicMMap(NULL);
      if (Debug == true)
	 std::clog << "Open memory Map (not filebased)" << std::endl;
   }

   // Lets try the source cache.
   unsigned long CurrentSize = 0;
   unsigned long TotalSize = 0;
   if (CheckValidity(SrcCacheFile, List, Files.begin(),
		     Files.begin()+EndOfSource) == true)
   {
      if (Debug == true)
	 std::clog << "srcpkgcache.bin is valid - populate MMap with it." << std::endl;
      // Preload the map with the source cache
      FileFd SCacheF(SrcCacheFile,FileFd::ReadOnly);
      unsigned long const alloc = Map->RawAllocate(SCacheF.Size());
      if ((alloc == 0 && _error->PendingError())
	  || SCacheF.Read((unsigned char *)Map->Data() + alloc,
			  SCacheF.Size()) == false)
	 return false;

      TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());

      // Build the status cache on top of the preloaded source cache.
      pkgCacheGenerator Gen(Map.Get(),Progress);
      if (_error->PendingError() == true)
	 return false;
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
		     Files.begin()+EndOfSource,Files.end()) == false)
	 return false;
   }
   else
   {
      if (Debug == true)
	 std::clog << "srcpkgcache.bin is NOT valid - rebuild" << std::endl;
      TotalSize = ComputeSize(Files.begin(),Files.end());

      // Build the source cache
      pkgCacheGenerator Gen(Map.Get(),Progress);
      if (_error->PendingError() == true)
	 return false;
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
		     Files.begin(),Files.begin()+EndOfSource) == false)
	 return false;

      // Write it back
      if (Writeable == true && SrcCacheFile.empty() == false)
      {
	 FileFd SCacheF(SrcCacheFile,FileFd::WriteAtomic);
	 if (_error->PendingError() == true)
	    return false;

	 fchmod(SCacheF.Fd(),0644);

	 // Write out the main data
	 if (SCacheF.Write(Map->Data(),Map->Size()) == false)
	    return _error->Error(_("IO Error saving source cache"));
	 SCacheF.Sync();

	 /* Write out the proper header: temporarily clear the dirty flag
	    so the on-disk copy is marked clean, then restore it since we
	    keep generating into the in-memory map. */
	 Gen.GetCache().HeaderP->Dirty = false;
	 if (SCacheF.Seek(0) == false ||
	     SCacheF.Write(Map->Data(),sizeof(*Gen.GetCache().HeaderP)) == false)
	    return _error->Error(_("IO Error saving source cache"));
	 Gen.GetCache().HeaderP->Dirty = true;
	 SCacheF.Sync();
      }

      // Build the status cache
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
		     Files.begin()+EndOfSource,Files.end()) == false)
	 return false;
   }
   if (Debug == true)
      std::clog << "Caches are ready for shipping" << std::endl;

   if (_error->PendingError() == true)
      return false;
   if (OutMap != 0)
   {
      if (CacheF != 0)
      {
	 // Hand back a fresh read-only map of the written file instead of
	 // the (larger) dynamic generation map.
	 delete Map.UnGuard();
	 *OutMap = new MMap(*CacheF,0);
      }
      else
      {
	 *OutMap = Map.UnGuard();
      }
   }

   return true;
}
									/*}}}*/
// CacheGenerator::MakeOnlyStatusCache - Build only a status files cache/*{{{*/
// ---------------------------------------------------------------------
/* Deprecated free-function wrapper; forwards to the static member below. */
__deprecated bool pkgMakeOnlyStatusCache(OpProgress &Progress,DynamicMMap **OutMap)
{ return pkgCacheGenerator::MakeOnlyStatusCache(&Progress, OutMap); }
1498 bool pkgCacheGenerator::MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **OutMap)
1499 {
1500 std::vector<pkgIndexFile *> Files;
1501 unsigned long EndOfSource = Files.size();
1502 if (_system->AddStatusFiles(Files) == false)
1503 return false;
1504
1505 SPtr<DynamicMMap> Map = CreateDynamicMMap(NULL);
1506 unsigned long CurrentSize = 0;
1507 unsigned long TotalSize = 0;
1508
1509 TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());
1510
1511 // Build the status cache
1512 if (Progress != NULL)
1513 Progress->OverallProgress(0,1,1,_("Reading package lists"));
1514 pkgCacheGenerator Gen(Map.Get(),Progress);
1515 if (_error->PendingError() == true)
1516 return false;
1517 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1518 Files.begin()+EndOfSource,Files.end()) == false)
1519 return false;
1520
1521 if (_error->PendingError() == true)
1522 return false;
1523 *OutMap = Map.UnGuard();
1524
1525 return true;
1526 }
1527 /*}}}*/
1528 // IsDuplicateDescription /*{{{*/
1529 static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
1530 MD5SumValue const &CurMd5, std::string const &CurLang)
1531 {
1532 // Descriptions in the same link-list have all the same md5
1533 if (Desc.end() == true || MD5SumValue(Desc.md5()) != CurMd5)
1534 return false;
1535 for (; Desc.end() == false; ++Desc)
1536 if (Desc.LanguageCode() == CurLang)
1537 return true;
1538 return false;
1539 }
1540 /*}}}*/
// CacheGenerator::FinishCache						/*{{{*/
/* Intentional no-op: nothing is left to do at this point. The hook is
   presumably kept so existing callers keep working — verify against
   callers before removing it. */
bool pkgCacheGenerator::FinishCache(OpProgress *Progress)
{
   return true;
}
									/*}}}*/