"""
|
|
BitBake 'remotedata' module
|
|
|
|
Provides support for using a datastore from the bitbake client
|
|
"""
|
|
|
|

# Copyright (C) 2016 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import bb.data
import bb.data_smart

class RemoteDatastores:
    """Used on the server side to manage references to server-side datastores"""
    def __init__(self, cooker):
        self.cooker = cooker
        self.datastores = {}   # index -> datastore object
        self.locked = []       # indices of externally managed datastores
        self.nextindex = 1     # next index handed out by store()

    def __len__(self):
        return len(self.datastores)

    def __getitem__(self, key):
        # Index None refers to the cooker's main datastore
        if key is None:
            return self.cooker.data
        else:
            return self.datastores[key]

    def items(self):
        return self.datastores.items()

    def store(self, d, locked=False):
        """
        Put a datastore into the collection. If locked=True then the datastore
        is understood to be managed externally and cannot be released by calling
        release().
        """
        idx = self.nextindex
        self.datastores[idx] = d
        if locked:
            self.locked.append(idx)
        self.nextindex += 1
        return idx

    def check_store(self, d, locked=False):
        """
        Put a datastore into the collection if it's not already in there;
        in either case return the index
        """
        # for/else: the else branch only runs if no existing entry matched
        for key, val in self.datastores.items():
            if val is d:
                idx = key
                break
        else:
            idx = self.store(d, locked)
        return idx
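
    # For illustration (hypothetical 'cooker' and DataSmart instances):
    #
    #   rd = RemoteDatastores(cooker)
    #   idx = rd.store(d)                  # registers d, returns a new index
    #   assert rd.check_store(d) == idx    # existing entry is reused
    #   assert rd[idx] is d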

    def release(self, idx):
        """Discard a datastore in the collection"""
        if idx in self.locked:
            raise Exception('Tried to release locked datastore %d' % idx)
        del self.datastores[idx]
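
    # Continuing the sketch above: an entry stored with locked=True cannot be
    # released.
    #
    #   lidx = rd.store(d, locked=True)
    #   rd.release(lidx)   # raises Exception('Tried to release locked datastore ...')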

    def receive_datastore(self, remote_data):
        """Receive a datastore object sent from the client (as prepared by transmit_datastore())"""
        dct = dict(remote_data)
        d = bb.data_smart.DataSmart()
        d.dict = dct
        # Walk down the nested '_data' levels; when the '_remote_data' marker is
        # found, wire the chain up to the matching server-side datastore (an
        # index of None means the cooker's main datastore).
        while True:
            if '_remote_data' in dct:
                dsindex = dct['_remote_data']['_content']
                del dct['_remote_data']
                if dsindex is None:
                    dct['_data'] = self.cooker.data.dict
                else:
                    dct['_data'] = self.datastores[dsindex].dict
                break
            elif '_data' in dct:
                idct = dict(dct['_data'])
                dct['_data'] = idct
                dct = idct
            else:
                break
        return d
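
    # The 'remote_data' argument above is the list of (key, value) pairs built
    # by transmit_datastore() below; the innermost level carries the
    # '_remote_data' marker naming the server-side datastore index, roughly
    # (variable entries shown in simplified form, for illustration only):
    #
    #   [('SOMEVAR', {'_content': 'somevalue'}),
    #    ('_remote_data', {'_content': None})]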

    @staticmethod
    def transmit_datastore(d):
        """Prepare a datastore object for sending over IPC from the client end"""
        # FIXME content might be a dict, need to turn that into a list as well
        # Recursively copy the dict chain into plain, picklable (key, value)
        # lists, replacing the remote datastore connector with its bare index
        def copy_dicts(dct):
            if '_remote_data' in dct:
                dsindex = dct['_remote_data']['_content'].dsindex
                newdct = dct.copy()
                newdct['_remote_data'] = {'_content': dsindex}
                return list(newdct.items())
            elif '_data' in dct:
                newdct = dct.copy()
                newdata = copy_dicts(dct['_data'])
                if newdata:
                    newdct['_data'] = newdata
                return list(newdct.items())
            return None
        main_dict = copy_dicts(d.dict)
        return main_dict
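
# A minimal round-trip sketch (hypothetical names; assumes a running cooker on
# the server side and a client-side DataSmart 'd' backed by a remote datastore):
#
#   payload = RemoteDatastores.transmit_datastore(d)   # client: flatten for IPC
#   ...send 'payload' over the IPC channel...
#   remotedatastores = RemoteDatastores(cooker)        # server side
#   d2 = remotedatastores.receive_datastore(payload)   # rebuild a usable DataSmart
#   value = d2.getVar('SOMEVAR')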