diff --git a/docker/build_scripts/build_package.sh b/docker/build_scripts/build_package.sh
index 8dfcce2f235d1b750dc1ee5c14b5058889929737..c701166c0b24206d5836f34eb59a2c99229966c4 100755
--- a/docker/build_scripts/build_package.sh
+++ b/docker/build_scripts/build_package.sh
@@ -139,6 +139,40 @@ for python_executable in "${python_executables[@]}"; do
     old_dir=$PWD
     cd $openvds_path/dist
     LD_LIBRARY_PATH=$skbuild_dir/cmake-install/lib${libdir_suffix} auditwheel repair *.whl
+    # Post-process the manylinux wheel produced by auditwheel: the package
+    # data dir ships its own copies of the OpenVDS libraries, which would
+    # duplicate the ones auditwheel vendored into openvds.libs.  Replace
+    # them with symlinks/copies that resolve inside the wheel instead.
+    manylinux_wheels=( "$PWD"/wheelhouse/*manylinux*.whl )
+    the_wheel=${manylinux_wheels[0]}
+    rm -rf tmp
+    mkdir tmp
+    cd tmp
+    unzip "$the_wheel"
+    data_dirs=( "$PWD"/*.data )
+    the_datadir=${data_dirs[0]}
+    cd "$the_datadir/data"
+    mkdir lib_new
+    cp ../../openvds.libs/* lib_new/
+    cd lib_new
+    the_openvds_lib_pattern=( libopenvds* )
+    the_openvds_lib=${the_openvds_lib_pattern[0]}
+    # Point every libopenvds.so* name at the single vendored (hashed) copy.
+    for ovds_link in ../lib${libdir_suffix}/libopenvds.so*; do
+      ln -s "$the_openvds_lib" "$(basename "$ovds_link")"
+    done
+    cp -av ../lib${libdir_suffix}/libopenvds-java* .
+    cp -av ../lib${libdir_suffix}/libsegy* .
+    # Everything in lib must find its dependencies next to itself.
+    patchelf --set-rpath '$ORIGIN' *
+    cd ..
+    rm -rf "lib${libdir_suffix}"
+    mv lib_new "lib${libdir_suffix}"
+    cd "$openvds_path/dist"
+    rm "$the_wheel"
+    # Re-pack with a RECORD that matches the rewritten contents.
+    "$base_dir"/repair_wheel_extra tmp "$the_wheel"
+
     cp wheelhouse/*manylinux* $openvds_path/binpackage/$name-$openvds_version/
     mv wheelhouse/*manylinux* $openvds_path/binpackage/python/$distribution/
     cd $old_dir
diff --git a/docker/build_scripts/repair_wheel_extra b/docker/build_scripts/repair_wheel_extra
new file mode 100755
index 0000000000000000000000000000000000000000..4f71b79ecc3e85dad91857a6ee1ce3aade43c846
--- /dev/null
+++ b/docker/build_scripts/repair_wheel_extra
@@ -0,0 +1,121 @@
+#!/opt/_internal/pipx/venvs/auditwheel/bin/python
+# -*- coding: utf-8 -*-
+"""Re-pack an unpacked wheel tree into a .whl with a regenerated RECORD.
+
+Usage: repair_wheel_extra <bdist_dir> <output_name.whl>
+
+Most of this code is borrowed from auditwheel's wheeltools module.
+"""
+import csv
+import glob
+import hashlib
+import os
+import sys
+import zipfile
+from base64 import urlsafe_b64encode
+from os.path import join as pjoin, relpath, exists, sep as psep
+from typing import Generator
+
+
+class WheelToolsError(Exception):
+    """Raised when the unpacked wheel tree is not laid out as expected."""
+
+
+def _dist_info_dir(bdist_dir: str) -> str:
+    """Get the .dist-info directory from an unpacked wheel
+
+    Parameters
+    ----------
+    bdist_dir : str
+        Path of unpacked wheel file
+
+    Raises
+    ------
+    WheelToolsError
+        If there is not exactly one ``*.dist-info`` directory.
+    """
+    info_dirs = glob.glob(pjoin(bdist_dir, '*.dist-info'))
+    if len(info_dirs) != 1:
+        raise WheelToolsError("Should be exactly one `*.dist_info` directory")
+    return info_dirs[0]
+
+
+def rewrite_record(bdist_dir: str) -> None:
+    """ Rewrite RECORD file with hashes for all files in `wheel_sdir`
+
+    Copied from :method:`wheel.bdist_wheel.bdist_wheel.write_record`
+
+    Will also unsign wheel
+
+    Parameters
+    ----------
+    bdist_dir : str
+        Path of unpacked wheel file
+    """
+    info_dir = _dist_info_dir(bdist_dir)
+    record_path = pjoin(info_dir, 'RECORD')
+    record_relpath = relpath(record_path, bdist_dir)
+    # Unsign wheel - because we're invalidating the record hash
+    sig_path = pjoin(info_dir, 'RECORD.jws')
+    if exists(sig_path):
+        os.unlink(sig_path)
+
+    def walk() -> Generator[str, None, None]:
+        # Yield every file path (no directories) under the unpacked wheel.
+        for dir_path, _dirs, files in os.walk(bdist_dir):
+            for fname in files:
+                yield pjoin(dir_path, fname)
+
+    def skip(path: str) -> bool:
+        """Wheel hashes every possible file except RECORD itself."""
+        return path == record_relpath
+
+    with open(record_path, 'w+', newline='', encoding='utf-8') as record_file:
+        writer = csv.writer(record_file)
+        for path in walk():
+            relative_path = relpath(path, bdist_dir)
+            if skip(relative_path):
+                # RECORD must not contain its own hash/size.
+                hash_ = ''
+                size = ''
+            else:
+                with open(path, 'rb') as f:
+                    data = f.read()
+                digest = hashlib.sha256(data).digest()
+                sha256 = urlsafe_b64encode(digest).rstrip(b'=').decode('ascii')
+                hash_ = f'sha256={sha256}'
+                size = f'{len(data)}'
+            # RECORD entries always use forward slashes, per the wheel spec.
+            entry = relative_path.replace(psep, '/')
+            writer.writerow((entry, hash_, size))
+
+
+def dir2zip(in_dir: str, zip_fname: str) -> None:
+    """ Make a zip file `zip_fname` with contents of directory `in_dir`
+
+    The recorded filenames are relative to `in_dir`, so doing a standard zip
+    unpack of the resulting `zip_fname` in an empty directory will result in
+    the original directory contents.
+
+    Parameters
+    ----------
+    in_dir : str
+        Directory path containing files to go in the zip archive
+    zip_fname : str
+        Filename of zip archive to write
+    """
+    with zipfile.ZipFile(zip_fname, 'w',
+                         compression=zipfile.ZIP_DEFLATED) as z:
+        for root, _dirs, files in os.walk(in_dir):
+            for fname in files:
+                src = os.path.join(root, fname)
+                z.write(src, os.path.relpath(src, in_dir))
+
+
+if __name__ == '__main__':
+    if len(sys.argv) != 3:
+        print("Usage: {} [bdist_dir] [output_name.whl]".format(sys.argv[0]))
+        sys.exit(1)
+    rewrite_record(sys.argv[1])
+    dir2zip(sys.argv[1], sys.argv[2])
+
diff --git a/src/OpenVDS/IO/Linux_File.cpp b/src/OpenVDS/IO/Linux_File.cpp
index 26a5ec56401513cace71c908853e9b7e46a8438e..27d0354aeec8f25a14d8d8a0f2a1b936685bd8b9 100644
--- a/src/OpenVDS/IO/Linux_File.cpp
+++ b/src/OpenVDS/IO/Linux_File.cpp
@@ -182,7 +182,11 @@ public:
 bool File::Exists(const std::string& filename)
 {
   struct stat buf;
-  return (lstat(filename.c_str(), &buf) == 0) && S_ISREG(buf.st_mode);
+  // Use stat (not lstat) so symlinks are followed: a link that resolves to
+  // a regular file counts as an existing file.
+  if (stat(filename.c_str(), &buf) != 0)
+    return false;
+  return S_ISREG(buf.st_mode);
 }
 
 bool File::Open(const std::string& filename, bool isCreate, bool isDestroyExisting, bool isWriteAccess, Error &error)