path: root/build-scripts/fetch_prebuilt.py
blob: 1c89806d8b4dc8b9dea3f163bf7c7b596eb27f4b
#!/usr/bin/python
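"""Fetch prebuilt filesystem images from a build publishing server.

Given the URL of a build directory, this script walks the directory listing
returned by the server's /api/ls endpoint, checks each file's licence status
via /api/license, and downloads the boot, system and userdata tarballs that
are not license-protected.
"""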

import json
import os
import shutil
import sys
import urllib2
import urlparse


def download(api_urls, files_to_get):
    """Example of how to use the API to download a/all files in a directory."""

    # Get listing for file(s) pointed to by URL we were given
    request = urllib2.urlopen(api_urls.ls())
    listing = json.loads(request.read())["files"]
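    # Each entry in the listing is a dict; the fields used below are "type"
    # (entries marked "folder" are skipped), "name" (the file name matched
    # against files_to_get) and "url" (the server-side path handed to the
    # license and download endpoints).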

    for file_info in listing:

        if file_info["type"] == "folder":
            # Skip folders...
            continue
        elif file_info["name"] not in files_to_get:
            # Only grab the specified files.
            continue

        # Get the licenses. They are returned as a JSON document in the form:
        # {"licenses":
        #  [{"text": "<license text>", "digest": "<digest of license>"},
        #   {"text": "<license text>", "digest": "<digest of license>"},
        #   ...
        # ]}
        # Each license has a digest associated with it.
        request = urllib2.urlopen(api_urls.license(file_info["url"]))
        licenses = json.loads(request.read())["licenses"]

        if licenses[0] == "Open":
            headers = {}
        else:
            # Dont download any licensed files
            continue

        # Once the headers have been built, just download the file.
        req = urllib2.urlopen(urllib2.Request(api_urls.file(file_info["url"]),
                                              headers=headers))
        with open(os.path.basename(file_info["url"]), 'wb') as fp:
            shutil.copyfileobj(req, fp)


class ApiUrls(object):
    """urlsplit() returns an immutable object, but we want to manipulate the
    URL's path, so this convenience class performs the manipulations for us."""
    def __init__(self, input_url):
        self.parsed_url = list(urlparse.urlsplit(input_url))
        self.path = self.parsed_url[2]

    def ls(self, path=None):
        if not path:
            path = self.path
        self.parsed_url[2] = "/api/ls" + path
        return urlparse.urlunsplit(self.parsed_url)

    def license(self, path):
        self.parsed_url[2] = "/api/license" + path
        return urlparse.urlunsplit(self.parsed_url)

    def file(self, path):
        self.parsed_url[2] = path
        return urlparse.urlunsplit(self.parsed_url)
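
# Sketch of the URL rewriting ApiUrls performs (the host and paths below are
# placeholders, not a real server):
#   input URL:          http://snapshots.example.org/android/build-1/
#   ls():               http://snapshots.example.org/api/ls/android/build-1/
#   license(file_url):  http://snapshots.example.org/api/license<file_url>
#   file(file_url):     http://snapshots.example.org<file_url>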


if __name__ == '__main__':
    # Check that a URL has been supplied.
    if len(sys.argv) != 2:
        print >> sys.stderr, "Usage: fetch_prebuilt.py <URL>"
        sys.exit(1)

    api_urls = ApiUrls(sys.argv[1])

    # We are only interested in the filesystem images.
    files_to_get = ("boot.tar.bz2", "system.tar.bz2", "userdata.tar.bz2")

    download(api_urls, files_to_get)
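
    # Invocation sketch (the URL is a placeholder for a build directory on a
    # server that exposes the /api/ls and /api/license endpoints used above):
    #   ./fetch_prebuilt.py http://snapshots.example.org/android/build-1/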