path: root/meta/classes/externalsrc.bbclass
# Copyright (C) 2012 Linux Foundation
# Author: Richard Purdie
# Some code and influence taken from srctree.bbclass:
# Copyright (C) 2009 Chris Larson <clarson@kergoth.com>
# Released under the MIT license (see COPYING.MIT for the terms)
#
# externalsrc.bbclass enables use of an existing source tree, usually external to
# the build system, to build a piece of software rather than going through the
# usual fetch/unpack/patch process.
#
# To use, add externalsrc to the global inherit and set EXTERNALSRC to point at the
# directory containing the sources you want to use, e.g. from local.conf for a recipe
# called "myrecipe" you would do:
#
# INHERIT += "externalsrc"
# EXTERNALSRC_pn-myrecipe = "/path/to/my/source/tree"
#
# In order to make this class work for both target and native versions (or with
# multilibs/cross or other BBCLASSEXTEND variants), B is set to point to a separate
# directory under the work directory (split source and build directories). This is
# the default, but if circumstances dictate, the build directory can be set to the
# source directory by setting EXTERNALSRC_BUILD to the same value, e.g.:
#
# EXTERNALSRC_BUILD_pn-myrecipe = "/path/to/my/source/tree"
#

SRCTREECOVEREDTASKS ?= "do_patch do_unpack do_fetch"
EXTERNALSRC_SYMLINKS ?= "oe-workdir:${WORKDIR} oe-logs:${T}"

python () {
    externalsrc = d.getVar('EXTERNALSRC')

    # If this is the base recipe and EXTERNALSRC is set for it or any of its
    # derivatives, then enable BB_DONT_CACHE to force the recipe to always be
    # re-parsed so that the file-checksums function for do_compile is run every
    # time.
    bpn = d.getVar('BPN')
    if bpn == d.getVar('PN'):
        classextend = (d.getVar('BBCLASSEXTEND') or '').split()
        if (externalsrc or
                ('native' in classextend and
                 d.getVar('EXTERNALSRC_pn-%s-native' % bpn)) or
                ('nativesdk' in classextend and
                 d.getVar('EXTERNALSRC_pn-nativesdk-%s' % bpn)) or
                ('cross' in classextend and
                 d.getVar('EXTERNALSRC_pn-%s-cross' % bpn))):
            d.setVar('BB_DONT_CACHE', '1')

    if externalsrc:
        d.setVar('S', externalsrc)
        externalsrcbuild = d.getVar('EXTERNALSRC_BUILD')
        if externalsrcbuild:
            d.setVar('B', externalsrcbuild)
        else:
            d.setVar('B', '${WORKDIR}/${BPN}-${PV}/')

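        # Keep only local entries in SRC_URI (file:// URLs and kmeta type
        # entries); nothing else needs to be fetched when building from an
        # external source tree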
        local_srcuri = []
        fetch = bb.fetch2.Fetch((d.getVar('SRC_URI') or '').split(), d)
        for url in fetch.urls:
            url_data = fetch.ud[url]
            parm = url_data.parm
            if (url_data.type == 'file' or
                    'type' in parm and parm['type'] == 'kmeta'):
                local_srcuri.append(url)

        d.setVar('SRC_URI', ' '.join(local_srcuri))

        if '{SRCPV}' in d.getVar('PV', False):
            # Dummy value because the default function can't be called with blank SRC_URI
            d.setVar('SRCPV', '999')

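        # Collect the list of tasks defined for this recipe so their flags can
        # be adjusted below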
        tasks = filter(lambda k: d.getVarFlag(k, "task"), d.keys())

        for task in tasks:
            if task.endswith("_setscene"):
                # sstate is never going to work for external source trees, disable it
                bb.build.deltask(task, d)
            else:
                # Since configure will likely touch ${S}, take a lock on it so only one task has access at a time
                d.appendVarFlag(task, "lockfiles", " ${S}/singletask.lock")

            # We do not want our source to be wiped out, ever (kernel.bbclass does this for do_clean)
            cleandirs = (d.getVarFlag(task, 'cleandirs', False) or '').split()
            setvalue = False
            for cleandir in cleandirs[:]:
                if d.expand(cleandir) == externalsrc:
                    cleandirs.remove(cleandir)
                    setvalue = True
            if setvalue:
                d.setVarFlag(task, 'cleandirs', ' '.join(cleandirs))

        fetch_tasks = ['do_fetch', 'do_unpack']
        # If we deltask do_patch, there's no dependency to ensure do_unpack gets run, so add one
        # Note that we cannot use d.appendVarFlag() here because deps is expected to be a list object, not a string
        d.setVarFlag('do_configure', 'deps', (d.getVarFlag('do_configure', 'deps', False) or []) + ['do_unpack'])

        for task in d.getVar("SRCTREECOVEREDTASKS").split():
            if local_srcuri and task in fetch_tasks:
                continue
            bb.build.deltask(task, d)

        d.prependVarFlag('do_compile', 'prefuncs', "externalsrc_compile_prefunc ")
        d.prependVarFlag('do_configure', 'prefuncs', "externalsrc_configure_prefunc ")

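        # Track the contents of the external source tree via the file-checksums
        # task flag so that changes there cause these tasks to re-execute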
        d.setVarFlag('do_compile', 'file-checksums', '${@srctree_hash_files(d)}')
        d.setVarFlag('do_configure', 'file-checksums', '${@srctree_configure_hash_files(d)}')

        # We don't want the workdir to go away
        d.appendVar('RM_WORK_EXCLUDE', ' ' + d.getVar('PN'))

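        # Provide a do_buildclean task ('make clean' in ${B}); when B == S it
        # also runs before do_clean so the shared tree gets cleaned up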
        bb.build.addtask('do_buildclean',
                         'do_clean' if d.getVar('S') == d.getVar('B') else None,
                         None, d)

        # If B=S the same builddir is used even for different architectures.
        # Thus, use a shared CONFIGURESTAMPFILE and STAMP directory so that
        # change of do_configure task hash is correctly detected and stamps are
        # invalidated if e.g. MACHINE changes.
        if d.getVar('S') == d.getVar('B'):
            configstamp = '${TMPDIR}/work-shared/${PN}/${EXTENDPE}${PV}-${PR}/configure.sstate'
            d.setVar('CONFIGURESTAMPFILE', configstamp)
            d.setVar('STAMP', '${STAMPS_DIR}/work-shared/${PN}/${EXTENDPE}${PV}-${PR}')
            d.setVar('STAMPCLEAN', '${STAMPS_DIR}/work-shared/${PN}/*-*')
}

python externalsrc_configure_prefunc() {
    # Create desired symlinks
    symlinks = (d.getVar('EXTERNALSRC_SYMLINKS') or '').split()
    for symlink in symlinks:
        symsplit = symlink.split(':', 1)
        lnkfile = os.path.join(d.getVar('S'), symsplit[0])
        if len(symsplit) > 1:
            target = d.expand(symsplit[1])
            if os.path.islink(lnkfile):
                # Link already exists, leave it if it points to the right location already
                if os.readlink(lnkfile) == target:
                    continue
                os.unlink(lnkfile)
            elif os.path.exists(lnkfile):
                # File/dir exists with same name as link, just leave it alone
                continue
            os.symlink(target, lnkfile)
}

python externalsrc_compile_prefunc() {
    # Make it obvious that this is happening, since forgetting about it could lead to much confusion
    bb.plain('NOTE: %s: compiling from external source tree %s' % (d.getVar('PN'), d.getVar('EXTERNALSRC')))
}

do_buildclean[dirs] = "${S} ${B}"
do_buildclean[nostamp] = "1"
do_buildclean[doc] = "Call 'make clean' or equivalent in ${B}"
externalsrc_do_buildclean() {
	if [ -e Makefile -o -e makefile -o -e GNUmakefile ]; then
		oe_runmake clean || die "make failed"
	else
		bbnote "nothing to do - no makefile found"
	fi
}

def srctree_hash_files(d, srcdir=None):
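    """
    Return a file-checksums entry for do_compile. If the external source tree
    is a git repository, write its current tree hash (computed via a temporary
    index) to .git/oe-devtool-tree-sha1 and checksum that file; otherwise fall
    back to checksumming the entire tree.
    """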
    import shutil
    import subprocess
    import tempfile

    s_dir = srcdir or d.getVar('EXTERNALSRC')
    git_dir = os.path.join(s_dir, '.git')
    oe_hash_file = os.path.join(git_dir, 'oe-devtool-tree-sha1')

    ret = " "
    if os.path.exists(git_dir):
        with tempfile.NamedTemporaryFile(dir=git_dir, prefix='oe-devtool-index') as tmp_index:
            # Clone index
            shutil.copy2(os.path.join(git_dir, 'index'), tmp_index.name)
            # Update our custom index
            env = os.environ.copy()
            env['GIT_INDEX_FILE'] = tmp_index.name
            subprocess.check_output(['git', 'add', '.'], cwd=s_dir, env=env)
            sha1 = subprocess.check_output(['git', 'write-tree'], cwd=s_dir, env=env).decode("utf-8")
        with open(oe_hash_file, 'w') as fobj:
            fobj.write(sha1)
        ret = oe_hash_file + ':True'
    else:
        ret = s_dir + '/*:True'
    return ret

def srctree_configure_hash_files(d):
    """
    Get the list of files that should trigger do_configure to re-execute,
    based on the value of CONFIGURE_FILES
    """
    in_files = (d.getVar('CONFIGURE_FILES') or '').split()
    out_items = []
    search_files = []
    for entry in in_files:
        if entry.startswith('/'):
            out_items.append('%s:%s' % (entry, os.path.exists(entry)))
        else:
            search_files.append(entry)
    if search_files:
        s_dir = d.getVar('EXTERNALSRC')
        for root, _, files in os.walk(s_dir):
            for f in files:
                if f in search_files:
                    out_items.append('%s:True' % os.path.join(root, f))
    return ' '.join(out_items)

EXPORT_FUNCTIONS do_buildclean