author    | vvvv <vvvv@yandex-team.com>                          | 2024-11-08 19:53:36 +0300 |
----------|------------------------------------------------------|---------------------------|
committer | Vitaly Stoyan <vvvv@ydb.tech>                        | 2024-11-09 12:01:35 +0300 |
commit    | 3d5bcb1e40a790292e69c6d3ba03c530f9564454 (patch)     |                           |
tree      | b0893c7fba5b9cd9a1740c05083c7b9c82658ca6             |                           |
parent    | 84f7474e59875188f5bbde680babe0398bd5ce01 (diff)      |                           |
download  | ydb-3d5bcb1e40a790292e69c6d3ba03c530f9564454.tar.gz  |                           |
less ydb deps or disable for OSS for now
commit_hash:cba3283f48e43daee374c408d91d1fed33861da4
-rw-r--r-- | contrib/python/patch/.dist-info/METADATA           |   12 |
-rw-r--r-- | contrib/python/patch/.dist-info/top_level.txt      |    1 |
-rw-r--r-- | contrib/python/patch/patch.py                      | 1203 |
-rw-r--r-- | contrib/python/patch/ya.make                       |   22 |
-rw-r--r-- | yql/essentials/core/extract_predicate/ut/ya.make   |    5 |
-rw-r--r-- | yql/essentials/core/qplayer/storage/ydb/ut/ya.make |    5 |
-rw-r--r-- | yql/essentials/core/qplayer/storage/ydb/ya.make    |    5 |
-rw-r--r-- | yql/essentials/core/ut/ya.make                     |    4 |
-rw-r--r-- | yql/essentials/core/ut_common/ya.make              |    5 |
-rw-r--r-- | yql/essentials/minikql/mkql_node.cpp               |    2 |
-rw-r--r-- | yql/essentials/minikql/protobuf_udf/ut/ya.make     |    5 |
-rw-r--r-- | yql/essentials/parser/pg_wrapper/test/ya.make      |    5 |
-rw-r--r-- | yql/essentials/tools/exports.symlist               | 1179 |
-rw-r--r-- | yql/essentials/tools/pg-make-test/ya.make          |    5 |
-rw-r--r-- | yql/essentials/tools/pgrun/ya.make                 |    5 |
-rw-r--r-- | yql/essentials/tools/purebench/ya.make             |    5 |
-rw-r--r-- | yql/essentials/tools/udf_probe/ya.make             |    2 |
-rw-r--r-- | yql/essentials/tools/udf_resolver/ya.make          |    2 |
18 files changed, 1230 insertions, 1242 deletions
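
Most of the ya.make edits below follow one pattern: targets that still depend on internal-only infrastructure are wrapped in an IF (NOT OPENSOURCE) guard so they are skipped in the open-source export. A minimal sketch of that guard, assuming a hypothetical unit-test target (the path and source name are illustrative, not taken from this commit):

```
IF (NOT OPENSOURCE)

UNITTEST_FOR(yql/essentials/some/internal/lib)  # hypothetical target path

SRCS(
    internal_dep_ut.cpp                         # hypothetical source file
)

YQL_LAST_ABI_VERSION()

END()

ENDIF()
```

The guard encloses the whole file, including the closing END(), which is why each touched ya.make also gains a trailing ENDIF().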
diff --git a/contrib/python/patch/.dist-info/METADATA b/contrib/python/patch/.dist-info/METADATA deleted file mode 100644 index 5b9a6b2ffd..0000000000 --- a/contrib/python/patch/.dist-info/METADATA +++ /dev/null @@ -1,12 +0,0 @@ -Metadata-Version: 2.1 -Name: patch -Version: 1.16 -Author: anatoly techtonik -Author-email: techtonik at gmail com -Home-page: https://github.com/techtonik/python-patch/ -Summary: Library to parse and apply unified diffs -License: MIT License - - -### Features - diff --git a/contrib/python/patch/.dist-info/top_level.txt b/contrib/python/patch/.dist-info/top_level.txt deleted file mode 100644 index 9eb7b90ed5..0000000000 --- a/contrib/python/patch/.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -patch diff --git a/contrib/python/patch/patch.py b/contrib/python/patch/patch.py deleted file mode 100644 index 45c5a48658..0000000000 --- a/contrib/python/patch/patch.py +++ /dev/null @@ -1,1203 +0,0 @@ -#!/usr/bin/env python -""" - Patch utility to apply unified diffs - - Brute-force line-by-line non-recursive parsing - - Copyright (c) 2008-2016 anatoly techtonik - Available under the terms of MIT license - - https://github.com/techtonik/python-patch/ - -""" -from __future__ import print_function - -__author__ = "anatoly techtonik <techtonik@gmail.com>" -__version__ = "1.16" - -import copy -import logging -import re - -# cStringIO doesn't support unicode in 2.5 -try: - from StringIO import StringIO -except ImportError: - from io import BytesIO as StringIO # python 3 -try: - import urllib2 as urllib_request -except ImportError: - import urllib.request as urllib_request - -from os.path import exists, isfile, abspath -import os -import posixpath -import shutil -import sys - - -PY3K = sys.version_info >= (3, 0) - -# PEP 3114 -if not PY3K: - compat_next = lambda gen: gen.next() -else: - compat_next = lambda gen: gen.__next__() - -def tostr(b): - """ Python 3 bytes encoder. Used to print filename in - diffstat output. Assumes that filenames are in utf-8. - """ - if not PY3K: - return b - - # [ ] figure out how to print non-utf-8 filenames without - # information loss - return b.decode('utf-8') - - -#------------------------------------------------ -# Logging is controlled by logger named after the -# module name (e.g. 
'patch' for patch.py module) - -logger = logging.getLogger(__name__) - -debug = logger.debug -info = logger.info -warning = logger.warning - -class NullHandler(logging.Handler): - """ Copied from Python 2.7 to avoid getting - `No handlers could be found for logger "patch"` - http://bugs.python.org/issue16539 - """ - def handle(self, record): - pass - def emit(self, record): - pass - def createLock(self): - self.lock = None - -streamhandler = logging.StreamHandler() - -# initialize logger itself -logger.addHandler(NullHandler()) - -debugmode = False - -def setdebug(): - global debugmode, streamhandler - - debugmode = True - loglevel = logging.DEBUG - logformat = "%(levelname)8s %(message)s" - logger.setLevel(loglevel) - - if streamhandler not in logger.handlers: - # when used as a library, streamhandler is not added - # by default - logger.addHandler(streamhandler) - - streamhandler.setFormatter(logging.Formatter(logformat)) - - -#------------------------------------------------ -# Constants for Patch/PatchSet types - -DIFF = PLAIN = "plain" -GIT = "git" -HG = MERCURIAL = "mercurial" -SVN = SUBVERSION = "svn" -# mixed type is only actual when PatchSet contains -# Patches of different type -MIXED = MIXED = "mixed" - - -#------------------------------------------------ -# Helpers (these could come with Python stdlib) - -# x...() function are used to work with paths in -# cross-platform manner - all paths use forward -# slashes even on Windows. - -def xisabs(filename): - """ Cross-platform version of `os.path.isabs()` - Returns True if `filename` is absolute on - Linux, OS X or Windows. - """ - if filename.startswith(b'/'): # Linux/Unix - return True - elif filename.startswith(b'\\'): # Windows - return True - elif re.match(b'\\w:[\\\\/]', filename): # Windows - return True - return False - -def xnormpath(path): - """ Cross-platform version of os.path.normpath """ - # replace escapes and Windows slashes - normalized = posixpath.normpath(path).replace(b'\\', b'/') - # fold the result - return posixpath.normpath(normalized) - -def xstrip(filename): - """ Make relative path out of absolute by stripping - prefixes used on Linux, OS X and Windows. - - This function is critical for security. - """ - while xisabs(filename): - # strip windows drive with all slashes - if re.match(b'\\w:[\\\\/]', filename): - filename = re.sub(b'^\\w+:[\\\\/]+', b'', filename) - # strip all slashes - elif re.match(b'[\\\\/]', filename): - filename = re.sub(b'^[\\\\/]+', b'', filename) - return filename - -#----------------------------------------------- -# Main API functions - -def fromfile(filename): - """ Parse patch file. If successful, returns - PatchSet() object. Otherwise returns False. - """ - patchset = PatchSet() - debug("reading %s" % filename) - fp = open(filename, "rb") - res = patchset.parse(fp) - fp.close() - if res == True: - return patchset - return False - - -def fromstring(s): - """ Parse text string and return PatchSet() - object (or False if parsing fails) - """ - ps = PatchSet( StringIO(s) ) - if ps.errors == 0: - return ps - return False - - -def fromurl(url): - """ Parse patch from an URL, return False - if an error occured. Note that this also - can throw urlopen() exceptions. 
- """ - ps = PatchSet( urllib_request.urlopen(url) ) - if ps.errors == 0: - return ps - return False - - -# --- Utility functions --- -# [ ] reuse more universal pathsplit() -def pathstrip(path, n): - """ Strip n leading components from the given path """ - pathlist = [path] - while os.path.dirname(pathlist[0]) != b'': - pathlist[0:1] = os.path.split(pathlist[0]) - return b'/'.join(pathlist[n:]) -# --- /Utility function --- - - -class Hunk(object): - """ Parsed hunk data container (hunk starts with @@ -R +R @@) """ - - def __init__(self): - self.startsrc=None #: line count starts with 1 - self.linessrc=None - self.starttgt=None - self.linestgt=None - self.invalid=False - self.desc='' - self.text=[] - -# def apply(self, estream): -# """ write hunk data into enumerable stream -# return strings one by one until hunk is -# over -# -# enumerable stream are tuples (lineno, line) -# where lineno starts with 0 -# """ -# pass - - -class Patch(object): - """ Patch for a single file. - If used as an iterable, returns hunks. - """ - def __init__(self): - self.source = None - self.target = None - self.hunks = [] - self.hunkends = [] - self.header = [] - - self.type = None - - def __iter__(self): - for h in self.hunks: - yield h - - -class PatchSet(object): - """ PatchSet is a patch parser and container. - When used as an iterable, returns patches. - """ - - def __init__(self, stream=None): - # --- API accessible fields --- - - # name of the PatchSet (filename or ...) - self.name = None - # patch set type - one of constants - self.type = None - - # list of Patch objects - self.items = [] - - self.errors = 0 # fatal parsing errors - self.warnings = 0 # non-critical warnings - # --- /API --- - - if stream: - self.parse(stream) - - def __len__(self): - return len(self.items) - - def __iter__(self): - for i in self.items: - yield i - - def parse(self, stream): - """ parse unified diff - return True on success - """ - lineends = dict(lf=0, crlf=0, cr=0) - nexthunkno = 0 #: even if index starts with 0 user messages number hunks from 1 - - p = None - hunk = None - # hunkactual variable is used to calculate hunk lines for comparison - hunkactual = dict(linessrc=None, linestgt=None) - - - class wrapumerate(enumerate): - """Enumerate wrapper that uses boolean end of stream status instead of - StopIteration exception, and properties to access line information. 
- """ - - def __init__(self, *args, **kwargs): - # we don't call parent, it is magically created by __new__ method - - self._exhausted = False - self._lineno = False # after end of stream equal to the num of lines - self._line = False # will be reset to False after end of stream - - def next(self): - """Try to read the next line and return True if it is available, - False if end of stream is reached.""" - if self._exhausted: - return False - - try: - self._lineno, self._line = compat_next(super(wrapumerate, self)) - except StopIteration: - self._exhausted = True - self._line = False - return False - return True - - @property - def is_empty(self): - return self._exhausted - - @property - def line(self): - return self._line - - @property - def lineno(self): - return self._lineno - - # define states (possible file regions) that direct parse flow - headscan = True # start with scanning header - filenames = False # lines starting with --- and +++ - - hunkhead = False # @@ -R +R @@ sequence - hunkbody = False # - hunkskip = False # skipping invalid hunk mode - - hunkparsed = False # state after successfully parsed hunk - - # regexp to match start of hunk, used groups - 1,3,4,6 - re_hunk_start = re.compile(b"^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@") - - self.errors = 0 - # temp buffers for header and filenames info - header = [] - srcname = None - tgtname = None - - # start of main cycle - # each parsing block already has line available in fe.line - fe = wrapumerate(stream) - while fe.next(): - - # -- deciders: these only switch state to decide who should process - # -- line fetched at the start of this cycle - if hunkparsed: - hunkparsed = False - if re_hunk_start.match(fe.line): - hunkhead = True - elif fe.line.startswith(b"--- "): - filenames = True - else: - headscan = True - # -- ------------------------------------ - - # read out header - if headscan: - while not fe.is_empty and not fe.line.startswith(b"--- "): - header.append(fe.line) - fe.next() - if fe.is_empty: - if p == None: - debug("no patch data found") # error is shown later - self.errors += 1 - else: - info("%d unparsed bytes left at the end of stream" % len(b''.join(header))) - self.warnings += 1 - # TODO check for \No new line at the end.. 
- # TODO test for unparsed bytes - # otherwise error += 1 - # this is actually a loop exit - continue - - headscan = False - # switch to filenames state - filenames = True - - line = fe.line - lineno = fe.lineno - - - # hunkskip and hunkbody code skipped until definition of hunkhead is parsed - if hunkbody: - # [x] treat empty lines inside hunks as containing single space - # (this happens when diff is saved by copy/pasting to editor - # that strips trailing whitespace) - if line.strip(b"\r\n") == b"": - debug("expanding empty line in a middle of hunk body") - self.warnings += 1 - line = b' ' + line - - # process line first - if re.match(b"^[- \\+\\\\]", line): - # gather stats about line endings - if line.endswith(b"\r\n"): - p.hunkends["crlf"] += 1 - elif line.endswith(b"\n"): - p.hunkends["lf"] += 1 - elif line.endswith(b"\r"): - p.hunkends["cr"] += 1 - - if line.startswith(b"-"): - hunkactual["linessrc"] += 1 - elif line.startswith(b"+"): - hunkactual["linestgt"] += 1 - elif not line.startswith(b"\\"): - hunkactual["linessrc"] += 1 - hunkactual["linestgt"] += 1 - hunk.text.append(line) - # todo: handle \ No newline cases - else: - warning("invalid hunk no.%d at %d for target file %s" % (nexthunkno, lineno+1, p.target)) - # add hunk status node - hunk.invalid = True - p.hunks.append(hunk) - self.errors += 1 - # switch to hunkskip state - hunkbody = False - hunkskip = True - - # check exit conditions - if hunkactual["linessrc"] > hunk.linessrc or hunkactual["linestgt"] > hunk.linestgt: - warning("extra lines for hunk no.%d at %d for target %s" % (nexthunkno, lineno+1, p.target)) - # add hunk status node - hunk.invalid = True - p.hunks.append(hunk) - self.errors += 1 - # switch to hunkskip state - hunkbody = False - hunkskip = True - elif hunk.linessrc == hunkactual["linessrc"] and hunk.linestgt == hunkactual["linestgt"]: - # hunk parsed successfully - p.hunks.append(hunk) - # switch to hunkparsed state - hunkbody = False - hunkparsed = True - - # detect mixed window/unix line ends - ends = p.hunkends - if ((ends["cr"]!=0) + (ends["crlf"]!=0) + (ends["lf"]!=0)) > 1: - warning("inconsistent line ends in patch hunks for %s" % p.source) - self.warnings += 1 - if debugmode: - debuglines = dict(ends) - debuglines.update(file=p.target, hunk=nexthunkno) - debug("crlf: %(crlf)d lf: %(lf)d cr: %(cr)d\t - file: %(file)s hunk: %(hunk)d" % debuglines) - # fetch next line - continue - - if hunkskip: - if re_hunk_start.match(line): - # switch to hunkhead state - hunkskip = False - hunkhead = True - elif line.startswith(b"--- "): - # switch to filenames state - hunkskip = False - filenames = True - if debugmode and len(self.items) > 0: - debug("- %2d hunks for %s" % (len(p.hunks), p.source)) - - if filenames: - if line.startswith(b"--- "): - if srcname != None: - # XXX testcase - warning("skipping false patch for %s" % srcname) - srcname = None - # XXX header += srcname - # double source filename line is encountered - # attempt to restart from this second line - re_filename = b"^--- ([^\t]+)" - match = re.match(re_filename, line) - # todo: support spaces in filenames - if match: - srcname = match.group(1).strip() - else: - warning("skipping invalid filename at line %d" % (lineno+1)) - self.errors += 1 - # XXX p.header += line - # switch back to headscan state - filenames = False - headscan = True - elif not line.startswith(b"+++ "): - if srcname != None: - warning("skipping invalid patch with no target for %s" % srcname) - self.errors += 1 - srcname = None - # XXX header += srcname - # XXX header += 
line - else: - # this should be unreachable - warning("skipping invalid target patch") - filenames = False - headscan = True - else: - if tgtname != None: - # XXX seems to be a dead branch - warning("skipping invalid patch - double target at line %d" % (lineno+1)) - self.errors += 1 - srcname = None - tgtname = None - # XXX header += srcname - # XXX header += tgtname - # XXX header += line - # double target filename line is encountered - # switch back to headscan state - filenames = False - headscan = True - else: - re_filename = b"^\+\+\+ ([^\t]+)" - match = re.match(re_filename, line) - if not match: - warning("skipping invalid patch - no target filename at line %d" % (lineno+1)) - self.errors += 1 - srcname = None - # switch back to headscan state - filenames = False - headscan = True - else: - if p: # for the first run p is None - self.items.append(p) - p = Patch() - p.source = srcname - srcname = None - p.target = match.group(1).strip() - p.header = header - header = [] - # switch to hunkhead state - filenames = False - hunkhead = True - nexthunkno = 0 - p.hunkends = lineends.copy() - continue - - if hunkhead: - match = re.match(b"^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@(.*)", line) - if not match: - if not p.hunks: - warning("skipping invalid patch with no hunks for file %s" % p.source) - self.errors += 1 - # XXX review switch - # switch to headscan state - hunkhead = False - headscan = True - continue - else: - # TODO review condition case - # switch to headscan state - hunkhead = False - headscan = True - else: - hunk = Hunk() - hunk.startsrc = int(match.group(1)) - hunk.linessrc = 1 - if match.group(3): hunk.linessrc = int(match.group(3)) - hunk.starttgt = int(match.group(4)) - hunk.linestgt = 1 - if match.group(6): hunk.linestgt = int(match.group(6)) - hunk.invalid = False - hunk.desc = match.group(7)[1:].rstrip() - hunk.text = [] - - hunkactual["linessrc"] = hunkactual["linestgt"] = 0 - - # switch to hunkbody state - hunkhead = False - hunkbody = True - nexthunkno += 1 - continue - - # /while fe.next() - - if p: - self.items.append(p) - - if not hunkparsed: - if hunkskip: - warning("warning: finished with errors, some hunks may be invalid") - elif headscan: - if len(self.items) == 0: - warning("error: no patch data found!") - return False - else: # extra data at the end of file - pass - else: - warning("error: patch stream is incomplete!") - self.errors += 1 - if len(self.items) == 0: - return False - - if debugmode and len(self.items) > 0: - debug("- %2d hunks for %s" % (len(p.hunks), p.source)) - - # XXX fix total hunks calculation - debug("total files: %d total hunks: %d" % (len(self.items), - sum(len(p.hunks) for p in self.items))) - - # ---- detect patch and patchset types ---- - for idx, p in enumerate(self.items): - self.items[idx].type = self._detect_type(p) - - types = set([p.type for p in self.items]) - if len(types) > 1: - self.type = MIXED - else: - self.type = types.pop() - # -------- - - self._normalize_filenames() - - return (self.errors == 0) - - def _detect_type(self, p): - """ detect and return type for the specified Patch object - analyzes header and filenames info - - NOTE: must be run before filenames are normalized - """ - - # check for SVN - # - header starts with Index: - # - next line is ===... 
delimiter - # - filename is followed by revision number - # TODO add SVN revision - if (len(p.header) > 1 and p.header[-2].startswith(b"Index: ") - and p.header[-1].startswith(b"="*67)): - return SVN - - # common checks for both HG and GIT - DVCS = ((p.source.startswith(b'a/') or p.source == b'/dev/null') - and (p.target.startswith(b'b/') or p.target == b'/dev/null')) - - # GIT type check - # - header[-2] is like "diff --git a/oldname b/newname" - # - header[-1] is like "index <hash>..<hash> <mode>" - # TODO add git rename diffs and add/remove diffs - # add git diff with spaced filename - # TODO http://www.kernel.org/pub/software/scm/git/docs/git-diff.html - - # Git patch header len is 2 min - if len(p.header) > 1: - # detect the start of diff header - there might be some comments before - for idx in reversed(range(len(p.header))): - if p.header[idx].startswith(b"diff --git"): - break - if p.header[idx].startswith(b'diff --git a/'): - if (idx+1 < len(p.header) - and re.match(b'index \\w{7}..\\w{7} \\d{6}', p.header[idx+1])): - if DVCS: - return GIT - - # HG check - # - # - for plain HG format header is like "diff -r b2d9961ff1f5 filename" - # - for Git-style HG patches it is "diff --git a/oldname b/newname" - # - filename starts with a/, b/ or is equal to /dev/null - # - exported changesets also contain the header - # # HG changeset patch - # # User name@example.com - # ... - # TODO add MQ - # TODO add revision info - if len(p.header) > 0: - if DVCS and re.match(b'diff -r \\w{12} .*', p.header[-1]): - return HG - if DVCS and p.header[-1].startswith(b'diff --git a/'): - if len(p.header) == 1: # native Git patch header len is 2 - return HG - elif p.header[0].startswith(b'# HG changeset patch'): - return HG - - return PLAIN - - - def _normalize_filenames(self): - """ sanitize filenames, normalizing paths, i.e.: - 1. strip a/ and b/ prefixes from GIT and HG style patches - 2. remove all references to parent directories (with warning) - 3. translate any absolute paths to relative (with warning) - - [x] always use forward slashes to be crossplatform - (diff/patch were born as a unix utility after all) - - return None - """ - if debugmode: - debug("normalize filenames") - for i,p in enumerate(self.items): - if debugmode: - debug(" patch type = " + p.type) - debug(" source = " + p.source) - debug(" target = " + p.target) - if p.type in (HG, GIT): - # TODO: figure out how to deal with /dev/null entries - debug("stripping a/ and b/ prefixes") - if p.source != '/dev/null': - if not p.source.startswith(b"a/"): - warning("invalid source filename") - else: - p.source = p.source[2:] - if p.target != '/dev/null': - if not p.target.startswith(b"b/"): - warning("invalid target filename") - else: - p.target = p.target[2:] - - p.source = xnormpath(p.source) - p.target = xnormpath(p.target) - - sep = b'/' # sep value can be hardcoded, but it looks nice this way - - # references to parent are not allowed - if p.source.startswith(b".." + sep): - warning("error: stripping parent path for source file patch no.%d" % (i+1)) - self.warnings += 1 - while p.source.startswith(b".." + sep): - p.source = p.source.partition(sep)[2] - if p.target.startswith(b".." + sep): - warning("error: stripping parent path for target file patch no.%d" % (i+1)) - self.warnings += 1 - while p.target.startswith(b".." 
+ sep): - p.target = p.target.partition(sep)[2] - # absolute paths are not allowed - if xisabs(p.source) or xisabs(p.target): - warning("error: absolute paths are not allowed - file no.%d" % (i+1)) - self.warnings += 1 - if xisabs(p.source): - warning("stripping absolute path from source name '%s'" % p.source) - p.source = xstrip(p.source) - if xisabs(p.target): - warning("stripping absolute path from target name '%s'" % p.target) - p.target = xstrip(p.target) - - self.items[i].source = p.source - self.items[i].target = p.target - - - def diffstat(self): - """ calculate diffstat and return as a string - Notes: - - original diffstat ouputs target filename - - single + or - shouldn't escape histogram - """ - names = [] - insert = [] - delete = [] - delta = 0 # size change in bytes - namelen = 0 - maxdiff = 0 # max number of changes for single file - # (for histogram width calculation) - for patch in self.items: - i,d = 0,0 - for hunk in patch.hunks: - for line in hunk.text: - if line.startswith(b'+'): - i += 1 - delta += len(line)-1 - elif line.startswith(b'-'): - d += 1 - delta -= len(line)-1 - names.append(patch.target) - insert.append(i) - delete.append(d) - namelen = max(namelen, len(patch.target)) - maxdiff = max(maxdiff, i+d) - output = '' - statlen = len(str(maxdiff)) # stats column width - for i,n in enumerate(names): - # %-19s | %-4d %s - format = " %-" + str(namelen) + "s | %" + str(statlen) + "s %s\n" - - hist = '' - # -- calculating histogram -- - width = len(format % ('', '', '')) - histwidth = max(2, 80 - width) - if maxdiff < histwidth: - hist = "+"*insert[i] + "-"*delete[i] - else: - iratio = (float(insert[i]) / maxdiff) * histwidth - dratio = (float(delete[i]) / maxdiff) * histwidth - - # make sure every entry gets at least one + or - - iwidth = 1 if 0 < iratio < 1 else int(iratio) - dwidth = 1 if 0 < dratio < 1 else int(dratio) - #print(iratio, dratio, iwidth, dwidth, histwidth) - hist = "+"*int(iwidth) + "-"*int(dwidth) - # -- /calculating +- histogram -- - output += (format % (tostr(names[i]), str(insert[i] + delete[i]), hist)) - - output += (" %d files changed, %d insertions(+), %d deletions(-), %+d bytes" - % (len(names), sum(insert), sum(delete), delta)) - return output - - - def findfile(self, old, new): - """ return name of file to be patched or None """ - if exists(old): - return old - elif exists(new): - return new - else: - # [w] Google Code generates broken patches with its online editor - debug("broken patch from Google Code, stripping prefixes..") - if old.startswith(b'a/') and new.startswith(b'b/'): - old, new = old[2:], new[2:] - debug(" %s" % old) - debug(" %s" % new) - if exists(old): - return old - elif exists(new): - return new - return None - - - def apply(self, strip=0, root=None): - """ Apply parsed patch, optionally stripping leading components - from file paths. `root` parameter specifies working dir. 
- return True on success - """ - if root: - prevdir = os.getcwd() - os.chdir(root) - - total = len(self.items) - errors = 0 - if strip: - # [ ] test strip level exceeds nesting level - # [ ] test the same only for selected files - # [ ] test if files end up being on the same level - try: - strip = int(strip) - except ValueError: - errors += 1 - warning("error: strip parameter '%s' must be an integer" % strip) - strip = 0 - - #for fileno, filename in enumerate(self.source): - for i,p in enumerate(self.items): - if strip: - debug("stripping %s leading component(s) from:" % strip) - debug(" %s" % p.source) - debug(" %s" % p.target) - old = pathstrip(p.source, strip) - new = pathstrip(p.target, strip) - else: - old, new = p.source, p.target - - filename = self.findfile(old, new) - - if not filename: - warning("source/target file does not exist:\n --- %s\n +++ %s" % (old, new)) - errors += 1 - continue - if not isfile(filename): - warning("not a file - %s" % filename) - errors += 1 - continue - - # [ ] check absolute paths security here - debug("processing %d/%d:\t %s" % (i+1, total, filename)) - - # validate before patching - f2fp = open(filename, 'rb') - hunkno = 0 - hunk = p.hunks[hunkno] - hunkfind = [] - hunkreplace = [] - validhunks = 0 - canpatch = False - for lineno, line in enumerate(f2fp): - if lineno+1 < hunk.startsrc: - continue - elif lineno+1 == hunk.startsrc: - hunkfind = [x[1:].rstrip(b"\r\n") for x in hunk.text if x[0] in b" -"] - hunkreplace = [x[1:].rstrip(b"\r\n") for x in hunk.text if x[0] in b" +"] - #pprint(hunkreplace) - hunklineno = 0 - - # todo \ No newline at end of file - - # check hunks in source file - if lineno+1 < hunk.startsrc+len(hunkfind)-1: - if line.rstrip(b"\r\n") == hunkfind[hunklineno]: - hunklineno+=1 - else: - info("file %d/%d:\t %s" % (i+1, total, filename)) - info(" hunk no.%d doesn't match source file at line %d" % (hunkno+1, lineno+1)) - info(" expected: %s" % hunkfind[hunklineno]) - info(" actual : %s" % line.rstrip(b"\r\n")) - # not counting this as error, because file may already be patched. 
- # check if file is already patched is done after the number of - # invalid hunks if found - # TODO: check hunks against source/target file in one pass - # API - check(stream, srchunks, tgthunks) - # return tuple (srcerrs, tgterrs) - - # continue to check other hunks for completeness - hunkno += 1 - if hunkno < len(p.hunks): - hunk = p.hunks[hunkno] - continue - else: - break - - # check if processed line is the last line - if lineno+1 == hunk.startsrc+len(hunkfind)-1: - debug(" hunk no.%d for file %s -- is ready to be patched" % (hunkno+1, filename)) - hunkno+=1 - validhunks+=1 - if hunkno < len(p.hunks): - hunk = p.hunks[hunkno] - else: - if validhunks == len(p.hunks): - # patch file - canpatch = True - break - else: - if hunkno < len(p.hunks): - warning("premature end of source file %s at hunk %d" % (filename, hunkno+1)) - errors += 1 - - f2fp.close() - - if validhunks < len(p.hunks): - if self._match_file_hunks(filename, p.hunks): - warning("already patched %s" % filename) - else: - warning("source file is different - %s" % filename) - errors += 1 - if canpatch: - backupname = filename+b".orig" - if exists(backupname): - warning("can't backup original file to %s - aborting" % backupname) - else: - import shutil - shutil.move(filename, backupname) - if self.write_hunks(backupname, filename, p.hunks): - info("successfully patched %d/%d:\t %s" % (i+1, total, filename)) - os.unlink(backupname) - else: - errors += 1 - warning("error patching file %s" % filename) - shutil.copy(filename, filename+".invalid") - warning("invalid version is saved to %s" % filename+".invalid") - # todo: proper rejects - shutil.move(backupname, filename) - - if root: - os.chdir(prevdir) - - # todo: check for premature eof - return (errors == 0) - - - def _reverse(self): - """ reverse patch direction (this doesn't touch filenames) """ - for p in self.items: - for h in p.hunks: - h.startsrc, h.starttgt = h.starttgt, h.startsrc - h.linessrc, h.linestgt = h.linestgt, h.linessrc - for i,line in enumerate(h.text): - # need to use line[0:1] here, because line[0] - # returns int instead of bytes on Python 3 - if line[0:1] == b'+': - h.text[i] = b'-' + line[1:] - elif line[0:1] == b'-': - h.text[i] = b'+' +line[1:] - - def revert(self, strip=0, root=None): - """ apply patch in reverse order """ - reverted = copy.deepcopy(self) - reverted._reverse() - return reverted.apply(strip, root) - - - def can_patch(self, filename): - """ Check if specified filename can be patched. Returns None if file can - not be found among source filenames. False if patch can not be applied - clearly. True otherwise. 
- - :returns: True, False or None - """ - filename = abspath(filename) - for p in self.items: - if filename == abspath(p.source): - return self._match_file_hunks(filename, p.hunks) - return None - - - def _match_file_hunks(self, filepath, hunks): - matched = True - fp = open(abspath(filepath), 'rb') - - class NoMatch(Exception): - pass - - lineno = 1 - line = fp.readline() - hno = None - try: - for hno, h in enumerate(hunks): - # skip to first line of the hunk - while lineno < h.starttgt: - if not len(line): # eof - debug("check failed - premature eof before hunk: %d" % (hno+1)) - raise NoMatch - line = fp.readline() - lineno += 1 - for hline in h.text: - if hline.startswith(b"-"): - continue - if not len(line): - debug("check failed - premature eof on hunk: %d" % (hno+1)) - # todo: \ No newline at the end of file - raise NoMatch - if line.rstrip(b"\r\n") != hline[1:].rstrip(b"\r\n"): - debug("file is not patched - failed hunk: %d" % (hno+1)) - raise NoMatch - line = fp.readline() - lineno += 1 - - except NoMatch: - matched = False - # todo: display failed hunk, i.e. expected/found - - fp.close() - return matched - - - def patch_stream(self, instream, hunks): - """ Generator that yields stream patched with hunks iterable - - Converts lineends in hunk lines to the best suitable format - autodetected from input - """ - - # todo: At the moment substituted lineends may not be the same - # at the start and at the end of patching. Also issue a - # warning/throw about mixed lineends (is it really needed?) - - hunks = iter(hunks) - - srclineno = 1 - - lineends = {b'\n':0, b'\r\n':0, b'\r':0} - def get_line(): - """ - local utility function - return line from source stream - collecting line end statistics on the way - """ - line = instream.readline() - # 'U' mode works only with text files - if line.endswith(b"\r\n"): - lineends[b"\r\n"] += 1 - elif line.endswith(b"\n"): - lineends[b"\n"] += 1 - elif line.endswith(b"\r"): - lineends[b"\r"] += 1 - return line - - for hno, h in enumerate(hunks): - debug("hunk %d" % (hno+1)) - # skip to line just before hunk starts - while srclineno < h.startsrc: - yield get_line() - srclineno += 1 - - for hline in h.text: - # todo: check \ No newline at the end of file - if hline.startswith(b"-") or hline.startswith(b"\\"): - get_line() - srclineno += 1 - continue - else: - if not hline.startswith(b"+"): - get_line() - srclineno += 1 - line2write = hline[1:] - # detect if line ends are consistent in source file - if sum([bool(lineends[x]) for x in lineends]) == 1: - newline = [x for x in lineends if lineends[x] != 0][0] - yield line2write.rstrip(b"\r\n")+newline - else: # newlines are mixed - yield line2write - - for line in instream: - yield line - - - def write_hunks(self, srcname, tgtname, hunks): - src = open(srcname, "rb") - tgt = open(tgtname, "wb") - - debug("processing target file %s" % tgtname) - - tgt.writelines(self.patch_stream(src, hunks)) - - tgt.close() - src.close() - # [ ] TODO: add test for permission copy - shutil.copymode(srcname, tgtname) - return True - - - def dump(self): - for p in self.items: - for headline in p.header: - print(headline.rstrip('\n')) - print('--- ' + p.source) - print('+++ ' + p.target) - for h in p.hunks: - print('@@ -%s,%s +%s,%s @@' % (h.startsrc, h.linessrc, h.starttgt, h.linestgt)) - for line in h.text: - print(line.rstrip('\n')) - - -def main(): - from optparse import OptionParser - from os.path import exists - import sys - - opt = OptionParser(usage="1. %prog [options] unified.diff\n" - " 2. 
%prog [options] http://host/patch\n" - " 3. %prog [options] -- < unified.diff", - version="python-patch %s" % __version__) - opt.add_option("-q", "--quiet", action="store_const", dest="verbosity", - const=0, help="print only warnings and errors", default=1) - opt.add_option("-v", "--verbose", action="store_const", dest="verbosity", - const=2, help="be verbose") - opt.add_option("--debug", action="store_true", dest="debugmode", help="debug mode") - opt.add_option("--diffstat", action="store_true", dest="diffstat", - help="print diffstat and exit") - opt.add_option("-d", "--directory", metavar='DIR', - help="specify root directory for applying patch") - opt.add_option("-p", "--strip", type="int", metavar='N', default=0, - help="strip N path components from filenames") - opt.add_option("--revert", action="store_true", - help="apply patch in reverse order (unpatch)") - (options, args) = opt.parse_args() - - if not args and sys.argv[-1:] != ['--']: - opt.print_version() - opt.print_help() - sys.exit() - readstdin = (sys.argv[-1:] == ['--'] and not args) - - verbosity_levels = {0:logging.WARNING, 1:logging.INFO, 2:logging.DEBUG} - loglevel = verbosity_levels[options.verbosity] - logformat = "%(message)s" - logger.setLevel(loglevel) - streamhandler.setFormatter(logging.Formatter(logformat)) - - if options.debugmode: - setdebug() # this sets global debugmode variable - - if readstdin: - patch = PatchSet(sys.stdin) - else: - patchfile = args[0] - urltest = patchfile.split(':')[0] - if (':' in patchfile and urltest.isalpha() - and len(urltest) > 1): # one char before : is a windows drive letter - patch = fromurl(patchfile) - else: - if not exists(patchfile) or not isfile(patchfile): - sys.exit("patch file does not exist - %s" % patchfile) - patch = fromfile(patchfile) - - if options.diffstat: - print(patch.diffstat()) - sys.exit(0) - - #pprint(patch) - if options.revert: - patch.revert(options.strip, root=options.directory) or sys.exit(-1) - else: - patch.apply(options.strip, root=options.directory) or sys.exit(-1) - - # todo: document and test line ends handling logic - patch.py detects proper line-endings - # for inserted hunks and issues a warning if patched file has incosistent line ends - - -if __name__ == "__main__": - main() - -# Legend: -# [ ] - some thing to be done -# [w] - official wart, external or internal that is unlikely to be fixed - -# [ ] API break (2.x) wishlist -# PatchSet.items --> PatchSet.patches - -# [ ] run --revert test for all dataset items -# [ ] run .parse() / .dump() test for dataset diff --git a/contrib/python/patch/ya.make b/contrib/python/patch/ya.make deleted file mode 100644 index 319e7c7cef..0000000000 --- a/contrib/python/patch/ya.make +++ /dev/null @@ -1,22 +0,0 @@ -# Generated by devtools/yamaker (pypi). 
- -PY3_LIBRARY() - -VERSION(1.16) - -LICENSE(MIT) - -NO_LINT() - -PY_SRCS( - TOP_LEVEL - patch.py -) - -RESOURCE_FILES( - PREFIX contrib/python/patch/ - .dist-info/METADATA - .dist-info/top_level.txt -) - -END() diff --git a/yql/essentials/core/extract_predicate/ut/ya.make b/yql/essentials/core/extract_predicate/ut/ya.make index defc5a8d6b..69bf6c10cc 100644 --- a/yql/essentials/core/extract_predicate/ut/ya.make +++ b/yql/essentials/core/extract_predicate/ut/ya.make @@ -1,3 +1,5 @@ +IF (NOT OPENSOURCE) + UNITTEST_FOR(yql/essentials/core/extract_predicate) SRCS( @@ -37,3 +39,6 @@ ELSE() ENDIF() END() + +ENDIF() + diff --git a/yql/essentials/core/qplayer/storage/ydb/ut/ya.make b/yql/essentials/core/qplayer/storage/ydb/ut/ya.make index c817aa9389..70285a5126 100644 --- a/yql/essentials/core/qplayer/storage/ydb/ut/ya.make +++ b/yql/essentials/core/qplayer/storage/ydb/ut/ya.make @@ -1,3 +1,5 @@ +IF (NOT OPENSOURCE) + UNITTEST_FOR(yql/essentials/core/qplayer/storage/ydb) SRCS( @@ -9,3 +11,6 @@ PEERDIR( ) END() + +ENDIF() + diff --git a/yql/essentials/core/qplayer/storage/ydb/ya.make b/yql/essentials/core/qplayer/storage/ydb/ya.make index 0d220f2dd4..e68e67d237 100644 --- a/yql/essentials/core/qplayer/storage/ydb/ya.make +++ b/yql/essentials/core/qplayer/storage/ydb/ya.make @@ -1,3 +1,5 @@ +IF (NOT OPENSOURCE) + LIBRARY() SRCS( @@ -16,3 +18,6 @@ END() RECURSE_FOR_TESTS( ut ) + +ENDIF() + diff --git a/yql/essentials/core/ut/ya.make b/yql/essentials/core/ut/ya.make index 862ba37d82..8b7c2d7b60 100644 --- a/yql/essentials/core/ut/ya.make +++ b/yql/essentials/core/ut/ya.make @@ -1,3 +1,5 @@ +IF (NOT OPENSOURCE) + UNITTEST_FOR(yql/essentials/core) SRCS( @@ -59,3 +61,5 @@ ENDIF() YQL_LAST_ABI_VERSION() END() + +ENDIF() diff --git a/yql/essentials/core/ut_common/ya.make b/yql/essentials/core/ut_common/ya.make index d6c008557c..04c59aaf3d 100644 --- a/yql/essentials/core/ut_common/ya.make +++ b/yql/essentials/core/ut_common/ya.make @@ -1,3 +1,5 @@ +IF (NOT OPENSOURCE) + LIBRARY() SRCS( @@ -17,3 +19,6 @@ YQL_LAST_ABI_VERSION() END() + +ENDIF() + diff --git a/yql/essentials/minikql/mkql_node.cpp b/yql/essentials/minikql/mkql_node.cpp index dfafbb9a6d..434171131b 100644 --- a/yql/essentials/minikql/mkql_node.cpp +++ b/yql/essentials/minikql/mkql_node.cpp @@ -5,7 +5,6 @@ #include "mkql_node_printer.h" #include "mkql_runtime_version.h" #include <yql/essentials/parser/pg_catalog/catalog.h> -#include <contrib/ydb/public/lib/scheme_types/scheme_type_id.h> #include <util/stream/str.h> #include <util/string/join.h> @@ -521,7 +520,6 @@ TDataType::TDataType(NUdf::TDataTypeId schemeType, const TTypeEnvironment& env) TDataType* TDataType::Create(NUdf::TDataTypeId schemeType, const TTypeEnvironment& env) { MKQL_ENSURE(schemeType, "Null type isn't allowed."); MKQL_ENSURE(schemeType != NUdf::TDataType<NUdf::TDecimal>::Id, "Can't' create Decimal."); - MKQL_ENSURE(schemeType != NKikimr::NScheme::NTypeIds::Pg, "Can't create Pg."); MKQL_ENSURE(schemeType != 0, "0 type"); return ::new(env.Allocate<TDataType>()) TDataType(schemeType, env); } diff --git a/yql/essentials/minikql/protobuf_udf/ut/ya.make b/yql/essentials/minikql/protobuf_udf/ut/ya.make index 990faca52f..d7480199d1 100644 --- a/yql/essentials/minikql/protobuf_udf/ut/ya.make +++ b/yql/essentials/minikql/protobuf_udf/ut/ya.make @@ -1,3 +1,5 @@ +IF (NOT OPENSOURCE) + UNITTEST_FOR(yql/essentials/minikql/protobuf_udf) SRCS( @@ -24,3 +26,6 @@ PEERDIR( YQL_LAST_ABI_VERSION() END() + +ENDIF() + diff --git a/yql/essentials/parser/pg_wrapper/test/ya.make 
b/yql/essentials/parser/pg_wrapper/test/ya.make index 6d1a494f28..4ee6684cf7 100644 --- a/yql/essentials/parser/pg_wrapper/test/ya.make +++ b/yql/essentials/parser/pg_wrapper/test/ya.make @@ -1,3 +1,5 @@ +IF (NOT OPENSOURCE) + PY3TEST() TEST_SRCS( @@ -39,3 +41,6 @@ IF (SANITIZER_TYPE == "memory") ENDIF() END() + +ENDIF() + diff --git a/yql/essentials/tools/exports.symlist b/yql/essentials/tools/exports.symlist new file mode 100644 index 0000000000..f7a16b35c1 --- /dev/null +++ b/yql/essentials/tools/exports.symlist @@ -0,0 +1,1179 @@ +{ +global: + +# Do not hide weak symbols from glibc as this causes unexpected behaviour, +# specifically during TLS memory allocations. +__after_morecore_hook; +daylight; +environ; +_environ; +__free_hook; +__malloc_hook; +__malloc_initialize_hook; +__memalign_hook; +program_invocation_name; +program_invocation_short_name; +__realloc_hook; +timezone; +tzname; + +# Do not hide glibc overrides for same reason. +__libc_csu_fini; +__libc_csu_init; +__libc_memalign; +__libc_start_main; + +# sanitizers +__asan*; +__sanitizer*; +__tsan*; +__ubsan*; +__msan*; +__lsan*; + +# tsan +AnnotateHappensAfter; +AnnotateHappensBefore; +AnnotateIgnoreWritesBegin; +AnnotateIgnoreWritesEnd; +AnnotateIgnoreReadsBegin; +AnnotateIgnoreReadsEnd; + +# tsan interceptors +abort; +accept; +accept4; +aligned_alloc; +asctime; +asctime_r; +asprintf; +atexit; +backtrace; +backtrace_symbols; +bind; +calloc; +canonicalize_file_name; +capget; +capset; +cfree; +clock_getres; +clock_gettime; +clock_settime; +close; +__close; +closedir; +confstr; +connect; +creat; +creat64; +ctermid; +ctime; +ctime_r; +__cxa_atexit; +dlclose; +dl_iterate_phdr; +dlopen; +drand48_r; +dup; +dup2; +dup3; +endgrent; +endpwent; +epoll_create; +epoll_create1; +epoll_ctl; +epoll_pwait; +epoll_wait; +ether_aton; +ether_aton_r; +ether_hostton; +ether_line; +ether_ntoa; +ether_ntoa_r; +ether_ntohost; +eventfd; +eventfd_read; +eventfd_write; +_exit; +fclose; +fdopen; +fflush; +fgetxattr; +flistxattr; +fmemopen; +fopen; +fopen64; +fopencookie; +fork; +fprintf; +fread; +free; +freopen; +freopen64; +frexp; +frexpf; +frexpl; +fscanf; +fstat; +fstat64; +fstatfs; +fstatfs64; +fstatvfs; +fstatvfs64; +ftime; +fwrite; +__fxstat; +__fxstat64; +getaddrinfo; +get_current_dir_name; +getcwd; +getdelim; +__getdelim; +getgroups; +gethostbyaddr; +gethostbyaddr_r; +gethostbyname; +gethostbyname2; +gethostbyname2_r; +gethostbyname_r; +gethostent; +gethostent_r; +getifaddrs; +getitimer; +getline; +getmntent; +getmntent_r; +getnameinfo; +getpass; +getpeername; +getresgid; +getresuid; +getsockname; +getsockopt; +gettimeofday; +getxattr; +glob; +glob64; +gmtime; +gmtime_r; +iconv; +if_indextoname; +if_nametoindex; +inet_aton; +inet_ntop; +inet_pton; +initgroups; +inotify_init; +inotify_init1; +ioctl; +__isoc99_fprintf; +__isoc99_fscanf; +__isoc99_printf; +__isoc99_scanf; +__isoc99_snprintf; +__isoc99_sprintf; +__isoc99_sscanf; +__isoc99_vfprintf; +__isoc99_vfscanf; +__isoc99_vprintf; +__isoc99_vscanf; +__isoc99_vsnprintf; +__isoc99_vsprintf; +__isoc99_vsscanf; +kill; +lgamma; +lgammaf; +lgammaf_r; +lgammal; +lgammal_r; +lgamma_r; +lgetxattr; +__libc_memalign; +listen; +listxattr; +llistxattr; +localtime; +localtime_r; +longjmp; +lrand48_r; +lstat; +lstat64; +__lxstat; +__lxstat64; +malloc; +malloc_usable_size; +mbsnrtowcs; +mbsrtowcs; +mbstowcs; +memalign; +memchr; +memcmp; +memcpy; +memmem; +memmove; +memrchr; +memset; +mincore; +mktime; +mlock; +mlockall; +mmap; +mmap64; +modf; +modff; +modfl; +munlock; +munlockall; +munmap; +nanosleep; 
+_obstack_begin; +_obstack_begin_1; +_obstack_newchunk; +on_exit; +open; +open64; +opendir; +open_memstream; +open_wmemstream; +__overflow; +pipe; +pipe2; +poll; +posix_memalign; +ppoll; +prctl; +pread; +pread64; +preadv; +preadv64; +printf; +process_vm_readv; +process_vm_writev; +pthread_attr_getaffinity_np; +pthread_attr_getdetachstate; +pthread_attr_getguardsize; +pthread_attr_getinheritsched; +pthread_attr_getschedparam; +pthread_attr_getschedpolicy; +pthread_attr_getscope; +pthread_attr_getstack; +pthread_attr_getstacksize; +pthread_barrierattr_getpshared; +pthread_barrier_destroy; +pthread_barrier_init; +pthread_barrier_wait; +pthread_condattr_getclock; +pthread_condattr_getpshared; +pthread_cond_broadcast; +pthread_cond_destroy; +pthread_cond_init; +pthread_cond_signal; +pthread_cond_timedwait; +pthread_cond_wait; +pthread_create; +pthread_detach; +pthread_getschedparam; +pthread_join; +pthread_kill; +pthread_mutexattr_getprioceiling; +pthread_mutexattr_getprotocol; +pthread_mutexattr_getpshared; +pthread_mutexattr_getrobust; +pthread_mutexattr_getrobust_np; +pthread_mutexattr_gettype; +pthread_mutex_destroy; +pthread_mutex_init; +pthread_mutex_lock; +pthread_mutex_timedlock; +pthread_mutex_trylock; +pthread_mutex_unlock; +pthread_once; +pthread_rwlockattr_getkind_np; +pthread_rwlockattr_getpshared; +pthread_rwlock_destroy; +pthread_rwlock_init; +pthread_rwlock_rdlock; +pthread_rwlock_timedrdlock; +pthread_rwlock_timedwrlock; +pthread_rwlock_tryrdlock; +pthread_rwlock_trywrlock; +pthread_rwlock_unlock; +pthread_rwlock_wrlock; +pthread_setcancelstate; +pthread_setcanceltype; +pthread_setname_np; +pthread_spin_destroy; +pthread_spin_init; +pthread_spin_lock; +pthread_spin_trylock; +pthread_spin_unlock; +ptrace; +puts; +pvalloc; +pwrite; +pwrite64; +pwritev; +pwritev64; +raise; +random_r; +rand_r; +read; +readdir; +readdir64; +readdir64_r; +readdir_r; +readv; +realloc; +realpath; +recv; +recvfrom; +recvmsg; +remquo; +remquof; +remquol; +__res_iclose; +rmdir; +scandir; +scandir64; +scanf; +sched_getaffinity; +sched_getparam; +sem_destroy; +sem_getvalue; +sem_init; +sem_post; +sem_timedwait; +sem_trywait; +sem_wait; +send; +sendmsg; +sendto; +setgrent; +setitimer; +setjmp; +_setjmp; +setlocale; +setpwent; +shmctl; +sigaction; +sigemptyset; +sigfillset; +siglongjmp; +signal; +signalfd; +sigpending; +sigprocmask; +sigsetjmp; +__sigsetjmp; +sigsuspend; +sigtimedwait; +sigwait; +sigwaitinfo; +sincos; +sincosf; +sincosl; +sleep; +snprintf; +socket; +socketpair; +sprintf; +sscanf; +stat; +stat64; +statfs; +statfs64; +statvfs; +statvfs64; +strcasecmp; +strcasestr; +strchr; +strchrnul; +strcmp; +strcpy; +strcspn; +strdup; +strerror; +strerror_r; +strlen; +strncasecmp; +strncmp; +strncpy; +strnlen; +strpbrk; +strptime; +strrchr; +strspn; +strstr; +strtoimax; +strtoumax; +sysinfo; +tcgetattr; +tempnam; +textdomain; +time; +timerfd_gettime; +timerfd_settime; +times; +__tls_get_addr; +tmpfile; +tmpfile64; +tmpnam; +tmpnam_r; +tsearch; +__uflow; +__underflow; +unlink; +usleep; +valloc; +vasprintf; +vfork; +vfprintf; +vfscanf; +vprintf; +vscanf; +vsnprintf; +vsprintf; +vsscanf; +wait; +wait3; +wait4; +waitid; +waitpid; +wcrtomb; +wcsnrtombs; +wcsrtombs; +wcstombs; +wordexp; +__woverflow; +write; +writev; +__wuflow; +__wunderflow; +xdr_bool; +xdr_bytes; +xdr_char; +xdr_double; +xdr_enum; +xdr_float; +xdr_hyper; +xdr_int; +xdr_int16_t; +xdr_int32_t; +xdr_int64_t; +xdr_int8_t; +xdr_long; +xdr_longlong_t; +xdrmem_create; +xdr_quad_t; +xdr_short; +xdrstdio_create; +xdr_string; +xdr_u_char; +xdr_u_hyper; 
+xdr_u_int; +xdr_uint16_t; +xdr_uint32_t; +xdr_uint64_t; +xdr_uint8_t; +xdr_u_long; +xdr_u_longlong_t; +xdr_u_quad_t; +xdr_u_short; +__xpg_strerror_r; +__xstat; +__xstat64; + +# msan interceptors +accept; +accept4; +aligned_alloc; +asctime; +asctime_r; +asprintf; +backtrace; +backtrace_symbols; +bcopy; +calloc; +canonicalize_file_name; +capget; +capset; +cfree; +clock_getres; +clock_gettime; +clock_settime; +confstr; +ctermid; +ctime; +ctime_r; +__cxa_atexit; +dladdr; +dlclose; +dlerror; +dl_iterate_phdr; +dlopen; +drand48_r; +endgrent; +endpwent; +epoll_pwait; +epoll_wait; +ether_aton; +ether_aton_r; +ether_hostton; +ether_line; +ether_ntoa; +ether_ntoa_r; +ether_ntohost; +eventfd_read; +eventfd_write; +_exit; +fclose; +fcvt; +fdopen; +fflush; +fgetgrent; +fgetgrent_r; +fgetpwent; +fgetpwent_r; +fgets; +fgets_unlocked; +fgetxattr; +flistxattr; +fmemopen; +fopen; +fopen64; +fopencookie; +fork; +forkpty; +fprintf; +fread; +fread_unlocked; +free; +freopen; +freopen64; +frexp; +frexpf; +frexpl; +fscanf; +fstatfs; +fstatfs64; +fstatvfs; +fstatvfs64; +ftime; +__fxstat; +__fxstat64; +__fxstatat; +__fxstatat64; +gcvt; +getaddrinfo; +get_current_dir_name; +getcwd; +getdelim; +__getdelim; +getenv; +getgrent; +getgrent_r; +getgrgid; +getgrgid_r; +getgrnam; +getgrnam_r; +getgroups; +gethostbyaddr; +gethostbyaddr_r; +gethostbyname; +gethostbyname2; +gethostbyname2_r; +gethostbyname_r; +gethostent; +gethostent_r; +gethostname; +getifaddrs; +getitimer; +getline; +getmntent; +getmntent_r; +getnameinfo; +getpass; +getpeername; +getpwent; +getpwent_r; +getpwnam; +getpwnam_r; +getpwuid; +getpwuid_r; +getresgid; +getresuid; +getrlimit; +getrlimit64; +getrusage; +getsockname; +getsockopt; +gettimeofday; +getxattr; +glob; +glob64; +gmtime; +gmtime_r; +iconv; +if_indextoname; +if_nametoindex; +inet_aton; +inet_ntop; +inet_pton; +initgroups; +ioctl; +__isoc99_fprintf; +__isoc99_fscanf; +__isoc99_printf; +__isoc99_scanf; +__isoc99_snprintf; +__isoc99_sprintf; +__isoc99_sscanf; +__isoc99_vfprintf; +__isoc99_vfscanf; +__isoc99_vprintf; +__isoc99_vscanf; +__isoc99_vsnprintf; +__isoc99_vsprintf; +__isoc99_vsscanf; +lgamma; +lgammaf; +lgammaf_r; +lgammal; +lgammal_r; +lgamma_r; +lgetxattr; +__libc_memalign; +listxattr; +llistxattr; +localtime; +localtime_r; +lrand48_r; +__lxstat; +__lxstat64; +mallinfo; +malloc; +malloc_stats; +malloc_usable_size; +mallopt; +mbrtowc; +mbsnrtowcs; +mbsrtowcs; +mbstowcs; +mbtowc; +memalign; +memccpy; +memchr; +memcmp; +memcpy; +memmem; +memmove; +mempcpy; +memrchr; +memset; +mincore; +mktime; +mlock; +mlockall; +mmap; +mmap64; +modf; +modff; +modfl; +munlock; +munlockall; +_obstack_begin; +_obstack_begin_1; +_obstack_newchunk; +opendir; +open_memstream; +openpty; +open_wmemstream; +__overflow; +pipe; +pipe2; +poll; +posix_memalign; +ppoll; +prctl; +pread; +pread64; +preadv; +preadv64; +printf; +prlimit; +prlimit64; +process_vm_readv; +process_vm_writev; +pthread_attr_getaffinity_np; +pthread_attr_getdetachstate; +pthread_attr_getguardsize; +pthread_attr_getinheritsched; +pthread_attr_getschedparam; +pthread_attr_getschedpolicy; +pthread_attr_getscope; +pthread_attr_getstack; +pthread_attr_getstacksize; +pthread_barrierattr_getpshared; +pthread_condattr_getclock; +pthread_condattr_getpshared; +pthread_create; +pthread_getschedparam; +pthread_join; +pthread_key_create; +pthread_mutexattr_getprioceiling; +pthread_mutexattr_getprotocol; +pthread_mutexattr_getpshared; +pthread_mutexattr_getrobust; +pthread_mutexattr_getrobust_np; +pthread_mutexattr_gettype; +pthread_mutex_lock; 
+pthread_mutex_unlock; +pthread_rwlockattr_getkind_np; +pthread_rwlockattr_getpshared; +pthread_setcancelstate; +pthread_setcanceltype; +pthread_setname_np; +ptrace; +putenv; +pvalloc; +pwrite; +pwrite64; +pwritev; +pwritev64; +random_r; +rand_r; +read; +readdir; +readdir64; +readdir64_r; +readdir_r; +readlink; +readv; +realloc; +realpath; +recv; +recvfrom; +recvmsg; +remquo; +remquof; +remquol; +scandir; +scandir64; +scanf; +sched_getaffinity; +sched_getparam; +sem_destroy; +sem_getvalue; +sem_init; +sem_post; +sem_timedwait; +sem_trywait; +sem_wait; +send; +sendmsg; +sendto; +setenv; +setgrent; +setitimer; +setlocale; +setpwent; +shmat; +shmctl; +sigaction; +sigemptyset; +sigfillset; +signal; +sigpending; +sigprocmask; +sigtimedwait; +sigwait; +sigwaitinfo; +sincos; +sincosf; +sincosl; +snprintf; +socketpair; +sprintf; +sscanf; +statfs; +statfs64; +statvfs; +statvfs64; +stpcpy; +strcasecmp; +strcasestr; +strcat; +strchr; +strchrnul; +strcmp; +strcpy; +strcspn; +strdup; +__strdup; +strerror; +strerror_r; +strftime; +__strftime_l; +strftime_l; +strlen; +strncasecmp; +strncat; +strncmp; +strncpy; +strndup; +__strndup; +strnlen; +strpbrk; +strptime; +strrchr; +strspn; +strstr; +strtod; +__strtod_internal; +__strtod_l; +strtod_l; +strtof; +__strtof_internal; +__strtof_l; +strtof_l; +strtoimax; +strtol; +strtold; +__strtold_internal; +__strtold_l; +strtold_l; +__strtol_internal; +strtoll; +__strtol_l; +strtol_l; +__strtoll_internal; +__strtoll_l; +strtoll_l; +strtoul; +__strtoul_internal; +strtoull; +__strtoul_l; +strtoul_l; +__strtoull_internal; +__strtoull_l; +strtoull_l; +strtoumax; +strxfrm; +strxfrm_l; +swprintf; +sysinfo; +tcgetattr; +tempnam; +textdomain; +time; +timerfd_gettime; +timerfd_settime; +times; +__tls_get_addr; +tmpnam; +tmpnam_r; +tsearch; +tzset; +__uflow; +uname; +__underflow; +valloc; +vasprintf; +vfprintf; +vfscanf; +vprintf; +vscanf; +vsnprintf; +vsprintf; +vsscanf; +vswprintf; +wait; +wait3; +wait4; +waitid; +waitpid; +wcrtomb; +wcschr; +wcscmp; +wcscpy; +wcsftime; +__wcsftime_l; +wcsftime_l; +wcslen; +wcsnrtombs; +wcsrtombs; +wcstod; +__wcstod_internal; +__wcstod_l; +wcstod_l; +wcstof; +__wcstof_internal; +__wcstof_l; +wcstof_l; +wcstol; +wcstold; +__wcstold_internal; +__wcstold_l; +wcstold_l; +__wcstol_internal; +wcstoll; +__wcstol_l; +wcstol_l; +__wcstoll_internal; +__wcstoll_l; +wcstoll_l; +wcstombs; +wcstoul; +__wcstoul_internal; +wcstoull; +__wcstoul_l; +wcstoul_l; +__wcstoull_internal; +__wcstoull_l; +wcstoull_l; +wmemcpy; +wmemmove; +wmempcpy; +wmemset; +wordexp; +__woverflow; +write; +writev; +__wuflow; +__wunderflow; +xdr_bool; +xdr_bytes; +xdr_char; +xdr_double; +xdr_enum; +xdr_float; +xdr_hyper; +xdr_int; +xdr_int16_t; +xdr_int32_t; +xdr_int64_t; +xdr_int8_t; +xdr_long; +xdr_longlong_t; +xdrmem_create; +xdr_quad_t; +xdr_short; +xdrstdio_create; +xdr_string; +xdr_u_char; +xdr_u_hyper; +xdr_u_int; +xdr_uint16_t; +xdr_uint32_t; +xdr_uint64_t; +xdr_uint8_t; +xdr_u_long; +xdr_u_longlong_t; +xdr_u_quad_t; +xdr_u_short; +__xpg_strerror_r; +__xstat; +__xstat64; + +# UDF allocator +UdfAllocate; +UdfFree; +UdfAllocateWithSize; +UdfFreeWithSize; +UdfArrowAllocate; +UdfArrowReallocate; +UdfArrowFree; + +# UDF terminator +UdfTerminate; +UdfRegisterObject; +UdfUnregisterObject; + +# PG functions +AggCheckCallContext; +AllocSetContextCreateInternal; +appendStringInfo; +appendStringInfoChar; +appendStringInfoString; +array_create_iterator; +array_free_iterator; +array_iterate; +ArrayGetNItems; +BlessTupleDesc; +BuildTupleFromCStrings; +CallerFInfoFunctionCall1; 
+CallerFInfoFunctionCall2; +check_stack_depth; +construct_array; +construct_md_array; +cstring_to_text_with_len; +cstring_to_text; +datumCopy; +DecodeDateTime; +deconstruct_array; +DecrTupleDescRefCount; +DirectFunctionCall1Coll; +DirectFunctionCall2Coll; +DirectFunctionCall3Coll; +DirectFunctionCall4Coll; +DirectFunctionCall5Coll; +EncodeDateOnly; +EncodeDateTime; +EncodeSpecialDate; +EncodeSpecialTimestamp; +EncodeTimeOnly; +end_MultiFuncCall; +errcode; +errdetail; +errfinish; +errhint; +errmsg_internal; +errmsg; +errstart_cold; +errstart; +exprType; +find_coercion_pathway; +free_attstatsslot; +get_attnum; +get_attstatsslot; +get_atttype; +get_call_result_type; +get_commutator; +get_element_type; +get_extension_oid; +get_extension_schema; +get_fn_expr_argtype; +get_func_name; +get_func_namespace; +get_namespace_name; +get_opfamily_member; +get_rel_name; +get_restriction_variable; +get_typbyval; +get_typlen; +get_typlenbyvalalign; +GetAttributeByNum; +getBaseType; +GetCurrentTransactionId; +GetDatabaseEncoding; +GetMemoryChunkContext; +getmissingattr; +GetSysCacheOid; +getTypeOutputInfo; +hash_bytes; +hash_create; +hash_search; +heap_form_tuple; +heap_freetuple; +heap_getsysattr; +HeapTupleHeaderGetDatum; +index_close; +index_open; +init_MultiFuncCall; +initStringInfo; +is_pseudo_constant_for_index; +j2date; +jsonb_in; +lappend; +list_concat; +list_make1_impl; +list_make2_impl; +lookup_rowtype_tupdesc; +LookupFuncName; +make_opclause; +makeFuncExpr; +makeString; +makeStringInfo; +MemoryContextAlloc; +MemoryContextAllocZero; +MemoryContextDelete; +MemoryContextRegisterResetCallback; +MemoryContextStrdup; +nocache_index_getattr; +nocachegetattr; +OidFunctionCall1Coll; +OidOutputFunctionCall; +ParseDateTime; +per_MultiFuncCall; +pg_detoast_datum_copy; +pg_detoast_datum_packed; +pg_detoast_datum_slice; +pg_detoast_datum; +pg_do_encoding_conversion; +pg_qsort; +pg_snprintf; +pg_sprintf; +pg_vsnprintf; +ProcessInterrupts; +quote_qualified_identifier; +ReadBuffer; +regclassin; +RelationClose; +RelationGetIndexList; +RelationIdGetRelation; +ReleaseBuffer; +ReleaseSysCache; +ScanKeyInit; +SearchSysCache1; +SearchSysCache2; +SearchSysCache3; +SPI_connect; +SPI_exec; +SPI_execute_with_args; +SPI_execute; +SPI_finish; +SPI_fnumber; +SPI_freetuptable; +SPI_getbinval; +SPI_gettype; +SPI_getvalue; +SPI_modifytuple; +SPI_palloc; +std_typanalyze; +systable_beginscan; +systable_endscan; +systable_getnext; +table_close; +table_open; +text_to_cstring; +textout; +timestamp2tm; +tm2timestamp; +toast_raw_datum_size; +TupleDescGetAttInMetadata; +type_is_rowtype; +TypenameGetTypid; +vacuum_delay_point; +WinGetCurrentPosition; +WinGetFuncArgCurrent; +WinGetFuncArgInPartition; +WinGetPartitionLocalMemory; +WinGetPartitionRowCount; +yql_escape_json; +yql_IsValidJsonNumber; +yql_palloc; +yql_palloc0; +yql_pfree; +yql_pstrdup; +yql_repalloc; + +# PG global vars +ImplPtrBufferBlocks; +ImplPtrCacheMemoryContext; +ImplPtrCurrentMemoryContext; +ImplPtrInterruptPending; +ImplPtrLocalBufferBlockPointers; +ImplPtrSPI_processed; +ImplPtrSPI_tuptable; + +# Extra PG API +yql_read_table; +yql_iterator_error; +yql_iterator_has_data; +yql_iterator_value; +yql_iterator_move; +yql_iterator_close; + +local: *; +}; + diff --git a/yql/essentials/tools/pg-make-test/ya.make b/yql/essentials/tools/pg-make-test/ya.make index 0134220b81..344ea2d3d2 100644 --- a/yql/essentials/tools/pg-make-test/ya.make +++ b/yql/essentials/tools/pg-make-test/ya.make @@ -1,3 +1,5 @@ +IF (NOT OPENSOURCE) + PY3_PROGRAM(pg-make-test) PY_SRCS( @@ -19,3 +21,6 @@ 
 RECURSE(
     update-test-status
 )
+ENDIF()
+
+
diff --git a/yql/essentials/tools/pgrun/ya.make b/yql/essentials/tools/pgrun/ya.make
index aca35e4167..fcad7dd776 100644
--- a/yql/essentials/tools/pgrun/ya.make
+++ b/yql/essentials/tools/pgrun/ya.make
@@ -1,3 +1,5 @@
+IF (NOT OPENSOURCE)
+
 PROGRAM(pgrun)
 
 ALLOCATOR(J)
@@ -44,3 +46,6 @@ PEERDIR(
 YQL_LAST_ABI_VERSION()
 
 END()
+
+ENDIF()
+
diff --git a/yql/essentials/tools/purebench/ya.make b/yql/essentials/tools/purebench/ya.make
index 962f3e1497..055d01743f 100644
--- a/yql/essentials/tools/purebench/ya.make
+++ b/yql/essentials/tools/purebench/ya.make
@@ -1,3 +1,5 @@
+IF (NOT OPENSOURCE)
+
 PROGRAM(purebench)
 
 ALLOCATOR(J)
@@ -29,3 +31,6 @@ PEERDIR(
 YQL_LAST_ABI_VERSION()
 
 END()
+
+ENDIF()
+
diff --git a/yql/essentials/tools/udf_probe/ya.make b/yql/essentials/tools/udf_probe/ya.make
index 9cdacc34e9..d6cbedec45 100644
@@ -7,7 +7,7 @@ SRCS(
 IF (OS_LINUX)
     # prevent external python extensions to lookup protobuf symbols (and maybe
     # other common stuff) in main binary
-    EXPORTS_SCRIPT(${ARCADIA_ROOT}/contrib/ydb/library/yql/tools/exports.symlist)
+    EXPORTS_SCRIPT(${ARCADIA_ROOT}/yql/essentials/tools/exports.symlist)
 ENDIF()
 
 PEERDIR(
diff --git a/yql/essentials/tools/udf_resolver/ya.make b/yql/essentials/tools/udf_resolver/ya.make
index 79b5de4b04..05240ec3b7 100644
@@ -8,7 +8,7 @@ SRCS(
 IF (OS_LINUX)
     # prevent external python extensions to lookup protobuf symbols (and maybe
     # other common stuff) in main binary
-    EXPORTS_SCRIPT(${ARCADIA_ROOT}/contrib/ydb/library/yql/tools/exports.symlist)
+    EXPORTS_SCRIPT(${ARCADIA_ROOT}/yql/essentials/tools/exports.symlist)
     PEERDIR(
         contrib/libs/libc_compat
     )
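
The last two hunks only repoint EXPORTS_SCRIPT from the old contrib/ydb/library/yql/tools location to the exports.symlist copy added in this commit under yql/essentials/tools. As a rough sketch of the resulting usage for some other Linux-only tool binary (program and source names are hypothetical):

```
PROGRAM(example_yql_tool)   # hypothetical program name

SRCS(
    main.cpp                # hypothetical source file
)

IF (OS_LINUX)
    # limit exported dynamic symbols so external python extensions do not
    # resolve protobuf (and other common) symbols from the main binary
    EXPORTS_SCRIPT(${ARCADIA_ROOT}/yql/essentials/tools/exports.symlist)
ENDIF()

YQL_LAST_ABI_VERSION()

END()
```

The version script itself keeps the glibc, sanitizer, UDF and PG entry points global and hides everything else via local: *, matching the file added above.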