import json
from pathlib import Path
- from typing import Dict, List, Optional
+ from typing import Collection, Dict, List, Optional, Union

import requests
from dataclassy import dataclass
from ape.exceptions import ProjectError
from ape.managers.networks import NetworkManager
from ape.types import Checksum, Compiler, ContractType, PackageManifest, Source
- from ape.utils import compute_checksum
+ from ape.utils import compute_checksum, get_all_files_in_directory, github_client

from .compilers import CompilerManager
from .config import ConfigManager


+ def _create_source_dict(contracts_paths: Collection[Path]) -> Dict[str, Source]:
+     return {
+         str(source): Source(  # type: ignore
+             checksum=Checksum(  # type: ignore
+                 algorithm="md5", hash=compute_checksum(source.read_bytes())
+             ),
+             urls=[],
+         )
+         for source in contracts_paths
+     }
+
+
@dataclass
class ProjectManager:
    path: Path
@@ -29,17 +41,57 @@ def __post_init__(self):
        self.path = Path(self.path)

        self.dependencies = {
-             manifest.name: manifest
-             for manifest in map(self._extract_manifest, self.config.dependencies)
+             n: self._extract_manifest(n, dep_id) for n, dep_id in self.config.dependencies.items()
        }

-     def _extract_manifest(self, manifest_uri: str) -> PackageManifest:
-         manifest_dict = requests.get(manifest_uri).json()
-         # TODO: Handle non-manifest URLs e.g. Ape/Brownie projects, Hardhat/Truffle projects, etc.
-         if "name" not in manifest_dict:
-             raise ProjectError("Dependencies must have a name.")
-
-         return PackageManifest.from_dict(manifest_dict)
+     def _extract_manifest(self, name: str, download_path: str) -> PackageManifest:
+         packages_path = self.config.DATA_FOLDER / "packages"
+         packages_path.mkdir(exist_ok=True, parents=True)
+         target_path = packages_path / name
+         target_path.mkdir(exist_ok=True, parents=True)
+
+         if download_path.startswith("https://") or download_path.startswith("http://"):
+             manifest_file_path = target_path / "manifest.json"
+             if manifest_file_path.exists():
+                 manifest_dict = json.loads(manifest_file_path.read_text())
+             else:
+                 # Download manifest
+                 response = requests.get(download_path)
+                 manifest_file_path.write_text(response.text)
+                 manifest_dict = response.json()
+
+             if "name" not in manifest_dict:
+                 raise ProjectError("Dependencies must have a name.")
+
+             return PackageManifest.from_dict(manifest_dict)
+         else:
+             # Github dependency (format: <org>/<repo>@<version>)
+             try:
+                 path, version = download_path.split("@")
+             except ValueError:
+                 raise ValueError("Invalid Github ID. Must be given as <org>/<repo>@<version>")
+
+             package_contracts_path = target_path / "contracts"
+             is_cached = len([p for p in target_path.iterdir()]) > 0
+
+             if not is_cached:
+                 github_client.download_package(path, version, target_path)
+
+             if not package_contracts_path.exists():
+                 raise ProjectError(
+                     "Dependency does not have a supported structure. Expecting 'contracts/' path."
+                 )
+
+             manifest = PackageManifest()
+             sources = [
+                 s
+                 for s in get_all_files_in_directory(package_contracts_path)
+                 if s.name not in ("package.json", "package-lock.json")
+                 and s.suffix in self.compilers.registered_compilers
+             ]
+             manifest.sources = _create_source_dict(sources)
+             manifest.contractTypes = self.compilers.compile(sources)
+             return manifest

    def __str__(self) -> str:
        return f'Project("{self.path}")'
@@ -146,8 +198,11 @@ def find_in_dir(dir_path: Path) -> Optional[Path]:
        return find_in_dir(self.contracts_folder)

    def load_contracts(
-         self, file_paths: Optional[List[Path]] = None, use_cache: bool = True
+         self, file_paths: Optional[Union[List[Path], Path]] = None, use_cache: bool = True
    ) -> Dict[str, ContractType]:
+         if isinstance(file_paths, Path):
+             file_paths = [file_paths]
+
        # Load a cached or clean manifest (to use for caching)
        manifest = use_cache and self.cached_manifest or PackageManifest()
        cached_sources = manifest.sources or {}
@@ -190,16 +245,7 @@ def file_needs_compiling(source: Path) -> bool:

        # Update cached contract types & source code entries in cached manifest
        manifest.contractTypes = contract_types
-         cached_sources = {
-             str(source): Source(  # type: ignore
-                 checksum=Checksum(  # type: ignore
-                     algorithm="md5", hash=compute_checksum(source.read_bytes())
-                 ),
-                 urls=[],
-             )
-             for source in sources
-         }
-         manifest.sources = cached_sources
+         manifest.sources = _create_source_dict(sources)

        # NOTE: Cache the updated manifest to disk (so ``self.cached_manifest`` reads next time)
        self.manifest_cachefile.write_text(json.dumps(manifest.to_dict()))
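
For context, a minimal standalone sketch of how the new `_extract_manifest` dispatches on the dependency value: a plain manifest URL is fetched and cached, while anything else is treated as a GitHub identifier in `<org>/<repo>@<version>` form. The `classify_dependency` helper and the example `dependencies` mapping below are illustrative assumptions, not part of this change.

# Illustrative only: mirrors the dispatch logic in _extract_manifest above.
def classify_dependency(dep_id: str) -> str:
    """Describe how _extract_manifest would treat this dependency identifier."""
    if dep_id.startswith("https://") or dep_id.startswith("http://"):
        return "manifest URL (downloaded once and cached as manifest.json)"
    try:
        org_repo, version = dep_id.split("@")
    except ValueError:
        raise ValueError("Invalid Github ID. Must be given as <org>/<repo>@<version>")
    return f"GitHub package {org_repo} at version {version} (downloaded into the data folder)"


dependencies = {
    "manifest-dep": "https://example.com/manifest.json",        # hypothetical URL dependency
    "github-dep": "OpenZeppelin/openzeppelin-contracts@4.4.2",  # hypothetical GitHub ID
}

for name, dep_id in dependencies.items():
    print(name, "->", classify_dependency(dep_id))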
0 commit comments
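
Similarly, a small sketch of the `sources` shape that `_create_source_dict` builds for the manifest, assuming `compute_checksum` returns an md5 hex digest of the file bytes (consistent with the `algorithm="md5"` field above). The temporary contract file is purely illustrative.

# Illustrative only: the manifest "sources" mapping produced for each contract file,
# assuming compute_checksum is an md5 hex digest of the file contents.
import hashlib
import tempfile
from pathlib import Path

with tempfile.TemporaryDirectory() as tmp:
    contract = Path(tmp) / "Example.vy"  # hypothetical contract source file
    contract.write_text("# example contract body\n")

    sources = {
        str(contract): {
            "checksum": {
                "algorithm": "md5",
                "hash": hashlib.md5(contract.read_bytes()).hexdigest(),
            },
            "urls": [],
        }
    }
    print(sources)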