Compare commits
11 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 01b4bf7a41 | |||
| 6b8684b1d7 | |||
| 2e50401a18 | |||
| f0a9e5f85d | |||
| 3d269303d9 | |||
| 0a972a5bce | |||
| e6e8b5c74a | |||
| b84d191632 | |||
| ffb99d6515 | |||
| c8407ff57b | |||
| dc7f625542 |
24
LICENSE
Normal file
24
LICENSE
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
Copyright (c) 2013, Roman Dobosz
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without
|
||||||
|
modification, are permitted provided that the following conditions are met:
|
||||||
|
* Redistributions of source code must retain the above copyright
|
||||||
|
notice, this list of conditions and the following disclaimer.
|
||||||
|
* Redistributions in binary form must reproduce the above copyright
|
||||||
|
notice, this list of conditions and the following disclaimer in the
|
||||||
|
documentation and/or other materials provided with the distribution.
|
||||||
|
* Neither the name of the organization nor the names of its contributors
|
||||||
|
may be used to endorse or promote products derived from this software
|
||||||
|
without specific prior written permission.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||||
|
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||||
|
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL ROMAN DOBOSZ BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||||
|
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||||
|
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||||
|
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||||
|
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||||
|
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
|
||||||
|
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
152
README.rst
152
README.rst
@@ -1,16 +1,154 @@
|
|||||||
=================================
|
========================
|
||||||
ulha extfs for Midnight Commander
|
Midnight Commander extfs
|
||||||
=================================
|
========================
|
||||||
|
|
||||||
This is Midnight Commander extfs plugin for handling lha/lzh archives.
|
Those are Midnight Commander extfs plugins for handling several archive types
|
||||||
It requires `lha <http://lha.sourceforge.jp>`_ free LHA implementation to work.
|
mostly known from AmigaOS - like **lha**, **lzx** and disk images like **adf**
|
||||||
|
and **dms**.
|
||||||
|
|
||||||
|
Installation
|
||||||
|
============
|
||||||
|
|
||||||
|
See individual installation plugins below. Basically it comes down to:
|
||||||
|
|
||||||
|
* copying ``extfslib.py`` and plugin files to ``~/.local/share/mc/extfs.d/``
|
||||||
|
* installing binary handlers (lha, unlzx, xdms and unadf)
|
||||||
|
* adding an entry in ``~/.config/mc/mc.ext``::
|
||||||
|
|
||||||
|
# arch
|
||||||
|
regex/\.pattern$
|
||||||
|
Open=%cd %p/handler_filename://
|
||||||
|
|
||||||
|
ULha
|
||||||
|
====
|
||||||
|
|
||||||
|
ULha is an extfs plugin which can be used with lha/lzh/lharc archives.
|
||||||
|
Personally, I've used it almost exclusively for archives created a long time ago
|
||||||
|
on my Amiga. Both reading from and writing into the archive are implemented.
|
||||||
|
|
||||||
|
Requirements
|
||||||
|
------------
|
||||||
|
|
||||||
|
ULha requires `free lha <http://lha.sourceforge.jp>`_ implementation to work.
|
||||||
|
|
||||||
Installation
|
Installation
|
||||||
------------
|
------------
|
||||||
* copy ``ulha.py`` to ``~/.local/share/mc/extfs/ulha``
|
|
||||||
* add or change entry for lha/lzh files handle in ``~/.config/mc/mc.ext``::
|
* copy ``extfslib.py`` and ``ulha`` to ``~/.local/share/mc/extfs.d/``
|
||||||
|
* add or change entry for files handle in ``~/.config/mc/mc.ext``::
|
||||||
|
|
||||||
# lha
|
# lha
|
||||||
regex/\.[lL]([Hh][aA]|[Zz][hH])$
|
regex/\.[lL]([Hh][aA]|[Zz][hH])$
|
||||||
Open=%cd %p/ulha://
|
Open=%cd %p/ulha://
|
||||||
View=%view{ascii} lha l %f
|
View=%view{ascii} lha l %f
|
||||||
|
|
||||||
|
ULzx
|
||||||
|
====
|
||||||
|
|
||||||
|
ULzx is an extfs plugin which can be used to browse and extract lzx archives,
|
||||||
|
which are known almost exclusively from Amiga.
|
||||||
|
|
||||||
|
Due to limitations of
|
||||||
|
`unlzx <ftp://us.aminet.net/pub/aminet/misc/unix/unlzx.c.gz.readme>`_ tools,
|
||||||
|
only reading is supported. Also be aware, that
|
||||||
|
`unlzx <ftp://us.aminet.net/pub/aminet/misc/unix/unlzx.c.gz.readme>`_ cannot
|
||||||
|
extract files individually, so copying entire archive content is not
|
||||||
|
recommended, since on every single file a full archive extract would be
|
||||||
|
performed, which in the end would have impact on performance.
|
||||||
|
|
||||||
|
Requirements
|
||||||
|
------------
|
||||||
|
|
||||||
|
ULzx requires
|
||||||
|
`unlzx <ftp://us.aminet.net/pub/aminet/misc/unix/unlzx.c.gz.readme>`_ tool.
|
||||||
|
|
||||||
|
Installation
|
||||||
|
------------
|
||||||
|
|
||||||
|
* copy ``extfslib.py`` and ``ulzx`` to ``~/.local/share/mc/extfs.d/``
|
||||||
|
* add or change entry for files handle in ``~/.config/mc/mc.ext``::
|
||||||
|
|
||||||
|
# lzx
|
||||||
|
regex/\.[lL][zZ][xX]$
|
||||||
|
Open=%cd %p/ulzx://
|
||||||
|
View=%view{ascii} unlzx -v %f
|
||||||
|
|
||||||
|
UAdf
|
||||||
|
====
|
||||||
|
|
||||||
|
UAdf is an extfs plugin suitable for reading .adf, .adz and .dms Amiga floppy
|
||||||
|
disk images. Due to limitations of the
|
||||||
|
`unadf <http://freecode.com/projects/unadf>`_, file access inside disk image is
|
||||||
|
read only.
|
||||||
|
|
||||||
|
In case of corrupted or no-DOS images, a message will be shown.
|
||||||
|
|
||||||
|
Requirements
|
||||||
|
------------
|
||||||
|
|
||||||
|
It requires ``unadf`` utility from `ADFlib <https://github.com/lclevy/ADFlib>`_
|
||||||
|
repository, with included `that commit
|
||||||
|
<https://github.com/lclevy/ADFlib/commit/d36dc2f395f3e8fcee81f66bc86994e166b6140f>`_
|
||||||
|
in particular, which introduced separation between filename and comment
|
||||||
|
attribute on Amiga Fast File System.
|
||||||
|
|
||||||
|
If it turns out that your distribution doesn't provide proper version of ADFlib,
|
||||||
|
there will be a need for building it by hand.
|
||||||
|
|
||||||
|
It may be done by using following steps:
|
||||||
|
|
||||||
|
#. Grab the `sources
|
||||||
|
<http://http.debian.net/debian/pool/main/u/unadf/unadf_0.7.11a.orig.tar.gz>`_
|
||||||
|
and `patches
|
||||||
|
<http://http.debian.net/debian/pool/main/u/unadf/unadf_0.7.11a-3.debian.tar.gz>`_
|
||||||
|
from `Debian repository <http://packages.debian.org/sid/unadf>`_.
|
||||||
|
#. Extract ``unadf_0.7.11a-3.debian.tar.gz`` and ``unadf_0.7.11a.orig.tar.gz``
|
||||||
|
into some temporary directory::
|
||||||
|
|
||||||
|
$ mkdir temp
|
||||||
|
$ cd temp
|
||||||
|
$ tar zxf ~/Downloads/unadf_0.7.11a-3.debian.tar.gz
|
||||||
|
$ tar zxf ~/Downloads/unadf_0.7.11a.orig.tar.gz
|
||||||
|
$ cd unadf-0.7.11a
|
||||||
|
|
||||||
|
#. Apply Debian patches::
|
||||||
|
|
||||||
|
$ for i in `cat ../debian/patches/series`; do
|
||||||
|
> patch -Np1 < "../debian/patches/${i}"
|
||||||
|
> done
|
||||||
|
|
||||||
|
#. Apply the patch from extras directory::
|
||||||
|
|
||||||
|
$ patch -Np1 < [path_to_this_repo]/extras/unadf_separate_comment.patch
|
||||||
|
$ make
|
||||||
|
$ cp Demo/unadf [destination_path]
|
||||||
|
|
||||||
|
#. Place ``unadf`` binary under directory reachable by ``$PATH``.
|
||||||
|
|
||||||
|
For optional dms support, `xdms <http://zakalwe.fi/~shd/foss/xdms/>`_ utility is
|
||||||
|
needed.
|
||||||
|
|
||||||
|
Installation
|
||||||
|
------------
|
||||||
|
|
||||||
|
* copy ``extfslib.py`` and ``uadf`` to ``~/.local/share/mc/extfs.d/``
|
||||||
|
* add or change entry for files handle in ``~/.config/mc/mc.ext``::
|
||||||
|
|
||||||
|
# adf
|
||||||
|
type/^Amiga\ .* disk
|
||||||
|
Open=%cd %p/uadf://
|
||||||
|
View=%view{ascii} unadf -lr %f
|
||||||
|
|
||||||
|
# adz
|
||||||
|
regex/\.([aA][dD][zZ])$
|
||||||
|
Open=%cd %p/uadf://
|
||||||
|
|
||||||
|
# dms
|
||||||
|
regex/\.([dD][mM][sS])$
|
||||||
|
Open=%cd %p/uadf://
|
||||||
|
|
||||||
|
License
|
||||||
|
=======
|
||||||
|
|
||||||
|
This software is licensed under 3-clause BSD license. See LICENSE file for
|
||||||
|
details.
|
||||||
|
|||||||
241
extfslib.py
Normal file
241
extfslib.py
Normal file
@@ -0,0 +1,241 @@
|
|||||||
|
"""
|
||||||
|
extfslib is a library which contains Archive class to support writing extfs
|
||||||
|
plugins for Midnight Commander.
|
||||||
|
|
||||||
|
Tested against python 3.6 and mc 4.8.22
|
||||||
|
|
||||||
|
Changelog:
|
||||||
|
1.2 Switch to python3
|
||||||
|
1.1 Added item pattern, and common git/uid attrs
|
||||||
|
1.0 Initial release
|
||||||
|
|
||||||
|
Author: Roman 'gryf' Dobosz <gryf73@gmail.com>
|
||||||
|
Date: 2019-06-30
|
||||||
|
Version: 1.2
|
||||||
|
Licence: BSD
|
||||||
|
"""
|
||||||
|
import argparse
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import re
|
||||||
|
from subprocess import check_output, CalledProcessError
|
||||||
|
|
||||||
|
|
||||||
|
class Archive(object):
    """Base archive handle providing the interface MC's extfs subsystem
    expects.

    Subclasses override the class attributes (``LINE_PAT``, ``ARCHIVER``,
    ``CMDS``) and whichever operations (``list``, ``copyout``, ...) the
    underlying archiver supports; every operation here defaults to writing
    "Not supported" on stderr and returning exit status 1.
    """

    # Template pattern for one line of the archiver's listing output.  The
    # groups are intentionally empty placeholders -- subclasses must supply
    # a real pattern using the same group names.  Raw bytes literals are
    # used so \s etc. are regex escapes, not (deprecated) string escapes.
    LINE_PAT = re.compile(rb"^(?P<size>)\s"
                          rb"(?P<perms>)\s"
                          rb"(?P<uid>)\s"
                          rb"(?P<gid>)\s"
                          rb"(?P<date>)\s+"
                          rb"(?P<time>)\s"
                          rb"(?P<fpath>)")
    # Placeholder archiver binary; subclasses set the real executable name.
    ARCHIVER = b"archiver_name"
    # Archiver sub-command for each supported operation.
    CMDS = {"list": b"l",
            "read": b"r",
            "write": b"w",
            "delete": b"d"}
    # Output template for a single listing entry, in the shape MC consumes.
    ITEM = (b"%(perms)s 1 %(uid)-8s %(gid)-8s %(size)8s %(datetime)s "
            b"%(display_name)s\n")

    def __init__(self, fname):
        """Prepare archive content for operations.

        Raises OSError when *fname* does not exist.
        """
        if not os.path.exists(fname):
            raise OSError("No such file or directory `%s'" % fname)
        self._uid = os.getuid()
        self._gid = os.getgid()
        self._arch = fname
        self.name_map = {}
        self._contents = self._get_dir()

    def _map_name(self, name):
        """MC still has a bug in the extfs subsystem with filepaths that
        carry a leading space.  As a workaround, replace a leading space
        with a tilde."""
        if name.startswith(b" "):
            new_name = b"".join([b"~", name[1:]])
            return new_name
        return name

    def _get_real_name(self, name):
        """Return the real filepath for a (possibly mapped) display name,
        or None when it is not in the listing.  See the _map_name docstring
        for details."""
        for item in self._contents:
            if item[b'display_name'] == name.encode('utf-8',
                                                    'surrogateescape'):
                return item[b'fpath']
        return None

    def _get_dir(self):
        """Prepare the archive file listing.  Every entry is a dict which
        should carry the keys: size, perms, uid, gid, date, time, fpath and
        display_name.  Always returns a list (possibly empty)."""
        contents = []

        out = self._call_command("list")
        if not out:
            # BUGFIX: previously this returned None, making self._contents
            # non-iterable; keep the return type uniform.
            return contents

        for line in out.split(b"\n"):
            match = self.LINE_PAT.match(line)
            if not match:
                continue
            contents.append(match.groupdict())

        return contents

    def _call_command(self, cmd, src=None, dst=None):
        """Run the archiver sub-command *cmd*, which can be one of:
        write, read, delete, list -- with optional *src*/*dst* arguments --
        and return the archiver's output.  Exits the process with status 1
        when the archiver reports failure."""
        command = [self.ARCHIVER, self.CMDS.get(cmd), self._arch]

        if src and dst:
            command.append(src)
            command.append(dst)
        elif src or dst:
            command.append(src and src or dst)

        try:
            output = check_output(command)
        except CalledProcessError:
            sys.exit(1)
        return output

    def list(self):
        """Output contents of the archive to stdout.  Not supported here."""
        sys.stderr.write("Not supported")
        return 1

    def run(self, dst):
        """Execute file out of archive.  Not supported here."""
        sys.stderr.write("Not supported")
        return 1

    def copyout(self, src, dst):
        """Copy file out of archive.  Not supported here."""
        sys.stderr.write("Not supported")
        return 1

    def rm(self, dst):
        """Remove file from archive.  Not supported here."""
        sys.stderr.write("Not supported")
        return 1

    def mkdir(self, dst):
        """Create empty directory in archive.  Not supported here."""
        sys.stderr.write("Not supported")
        return 1

    def rmdir(self, dst):
        """Remove directory from archive.  Not supported here."""
        sys.stderr.write("Not supported")
        return 1

    def copyin(self, dst, src=None):
        """Copy file to the archive.  Not supported here."""
        sys.stderr.write("Not supported")
        return 1
||||||
|
|
||||||
|
|
||||||
|
def usage():
    """Write the command-line usage summary to stdout."""
    prog = sys.argv[0]
    lines = [
        "Usage: %s {copyin,copyout} ARCHNAME SOURCE DESTINATION" % prog,
        "or: %s list ARCHNAME" % prog,
        "or: %s {mkdir,rm,rmdir,run} ARCHNAME TARGET" % prog,
    ]
    print("\n".join(lines))
||||||
|
|
||||||
|
|
||||||
|
def _parse_args(arch_class):
|
||||||
|
"""Use ArgumentParser to check for script arguments and execute."""
|
||||||
|
|
||||||
|
CALL_MAP = {'list': lambda a: arch_class(a.arch).list(),
|
||||||
|
'copyin': lambda a: arch_class(a.arch).copyin(a.src, a.dst),
|
||||||
|
'copyout': lambda a: arch_class(a.arch).copyout(a.src, a.dst),
|
||||||
|
'mkdir': lambda a: arch_class(a.arch).mkdir(a.dst),
|
||||||
|
'rm': lambda a: arch_class(a.arch).rm(a.dst),
|
||||||
|
'run': lambda a: arch_class(a.arch).run(a.dst)}
|
||||||
|
|
||||||
|
parser = argparse.ArgumentParser()
|
||||||
|
subparsers = parser.add_subparsers(help='supported commands')
|
||||||
|
parser_list = subparsers.add_parser('list', help="List contents of "
|
||||||
|
"archive")
|
||||||
|
parser_copyin = subparsers.add_parser('copyin', help="Copy file into "
|
||||||
|
"archive")
|
||||||
|
parser_copyout = subparsers.add_parser('copyout', help="Copy file out of "
|
||||||
|
"archive")
|
||||||
|
parser_rm = subparsers.add_parser('rm', help="Delete file from archive")
|
||||||
|
parser_mkdir = subparsers.add_parser('mkdir', help="Create directory in "
|
||||||
|
"archive")
|
||||||
|
parser_run = subparsers.add_parser('run', help="Execute archived file")
|
||||||
|
|
||||||
|
parser_list.add_argument('arch', help="Archive filename")
|
||||||
|
parser_list.set_defaults(func=CALL_MAP['list'])
|
||||||
|
|
||||||
|
parser_copyin.add_argument('arch', help="Archive filename")
|
||||||
|
parser_copyin.add_argument('src', help="Source filename")
|
||||||
|
parser_copyin.add_argument('dst', help="Destination filename (to be "
|
||||||
|
"written into archive)")
|
||||||
|
parser_copyin.set_defaults(func=CALL_MAP['copyin'])
|
||||||
|
|
||||||
|
parser_copyout.add_argument('arch', help="D64 Image filename")
|
||||||
|
parser_copyout.add_argument('src', help="Source filename (to be read from"
|
||||||
|
" archive")
|
||||||
|
parser_copyout.add_argument('dst', help="Destination filename")
|
||||||
|
parser_copyout.set_defaults(func=CALL_MAP['copyout'])
|
||||||
|
|
||||||
|
parser_rm.add_argument('arch', help="D64 Image filename")
|
||||||
|
parser_rm.add_argument('dst', help="File inside archive to be deleted")
|
||||||
|
parser_rm.set_defaults(func=CALL_MAP['rm'])
|
||||||
|
|
||||||
|
parser_mkdir.add_argument('arch', help="archive filename")
|
||||||
|
parser_mkdir.add_argument('dst', help="Directory name inside archive to "
|
||||||
|
"be created")
|
||||||
|
parser_mkdir.set_defaults(func=CALL_MAP['mkdir'])
|
||||||
|
|
||||||
|
parser_run.add_argument('arch', help="archive filename")
|
||||||
|
parser_run.add_argument('dst', help="File to be executed")
|
||||||
|
parser_run.set_defaults(func=CALL_MAP['run'])
|
||||||
|
|
||||||
|
args = parser.parse_args()
|
||||||
|
return args.func(args)
|
||||||
|
|
||||||
|
|
||||||
|
def parse_args(arch_class):
    """Retrieve and parse arguments from the command line and apply them to
    the passed arch_class class object."""
    supported = ('list', 'copyin', 'copyout', 'rm', 'mkdir', 'run', 'rmdir')
    try:
        command = sys.argv[1]
    except IndexError:
        usage()
        sys.exit(2)
    if command not in supported:
        usage()
        sys.exit(2)

    arch = src = dst = None
    try:
        arch = sys.argv[2]
        if command in ('copyin', 'copyout'):
            src = sys.argv[3]
            dst = sys.argv[4]
        elif command in ('rm', 'rmdir', 'run', 'mkdir'):
            dst = sys.argv[3]
    except IndexError:
        usage()
        sys.exit(2)

    handler = arch_class(arch)
    if command == 'list':
        return handler.list()
    if command in ('copyin', 'copyout'):
        return getattr(handler, command)(src, dst)
    # rm, rmdir, run and mkdir all take a single target argument.
    return getattr(handler, command)(dst)
||||||
122
extras/unadf_separate_comment.patch
Normal file
122
extras/unadf_separate_comment.patch
Normal file
@@ -0,0 +1,122 @@
|
|||||||
|
diff -ur unadf-0.7.11a.orig/Demo/unadf.c unadf-0.7.11a/Demo/unadf.c
|
||||||
|
--- unadf-0.7.11a.orig/Demo/unadf.c 2013-05-12 17:59:51.214905177 +0200
|
||||||
|
+++ unadf-0.7.11a/Demo/unadf.c 2013-05-12 17:50:06.843420519 +0200
|
||||||
|
@@ -58,6 +58,7 @@
|
||||||
|
puts(" -r : lists directory tree contents");
|
||||||
|
puts(" -c : use dircache data (must be used with -l)");
|
||||||
|
puts(" -s : display entries logical block pointer (must be used with -l)");
|
||||||
|
+ puts(" -m : display file comments, if exists (must be used with -l)");
|
||||||
|
putchar('\n');
|
||||||
|
puts(" -v n : mount volume #n instead of default #0 volume");
|
||||||
|
putchar('\n');
|
||||||
|
@@ -65,7 +66,8 @@
|
||||||
|
puts(" -d dir : extract to 'dir' directory");
|
||||||
|
}
|
||||||
|
|
||||||
|
-void printEnt(struct Volume *vol, struct Entry* entry, char *path, BOOL sect)
|
||||||
|
+void printEnt(struct Volume *vol, struct Entry* entry, char *path, BOOL sect,
|
||||||
|
+ BOOL comment)
|
||||||
|
{
|
||||||
|
/* do not print the links entries, ADFlib do not support them yet properly */
|
||||||
|
if (entry->type==ST_LFILE || entry->type==ST_LDIR || entry->type==ST_LSOFT)
|
||||||
|
@@ -89,7 +91,7 @@
|
||||||
|
printf("%s/",entry->name);
|
||||||
|
else
|
||||||
|
printf("%s",entry->name);
|
||||||
|
- if (entry->comment!=NULL && strlen(entry->comment)>0)
|
||||||
|
+ if (comment && entry->comment!=NULL && strlen(entry->comment)>0)
|
||||||
|
printf(", %s",entry->comment);
|
||||||
|
putchar('\n');
|
||||||
|
|
||||||
|
@@ -199,13 +201,14 @@
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
-void printTree(struct Volume *vol, struct List* tree, char* path, BOOL sect)
|
||||||
|
+void printTree(struct Volume *vol, struct List* tree, char* path, BOOL sect,
|
||||||
|
+ BOOL comment)
|
||||||
|
{
|
||||||
|
char *buf;
|
||||||
|
struct Entry* entry;
|
||||||
|
|
||||||
|
while(tree) {
|
||||||
|
- printEnt(vol, tree->content, path, sect);
|
||||||
|
+ printEnt(vol, tree->content, path, sect, comment);
|
||||||
|
if (tree->subdir!=NULL) {
|
||||||
|
entry = (struct Entry*)tree->content;
|
||||||
|
if (strlen(path)>0) {
|
||||||
|
@@ -215,11 +218,11 @@
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
sprintf(buf,"%s/%s", path, entry->name);
|
||||||
|
- printTree(vol, tree->subdir, buf, sect);
|
||||||
|
+ printTree(vol, tree->subdir, buf, sect, comment);
|
||||||
|
free(buf);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
- printTree(vol, tree->subdir, entry->name, sect);
|
||||||
|
+ printTree(vol, tree->subdir, entry->name, sect, comment);
|
||||||
|
}
|
||||||
|
tree = tree->next;
|
||||||
|
}
|
||||||
|
@@ -370,12 +373,10 @@
|
||||||
|
int main(int argc, char* argv[])
|
||||||
|
{
|
||||||
|
int i, j;
|
||||||
|
- BOOL rflag, lflag, xflag, cflag, vflag, sflag, dflag, pflag, qflag;
|
||||||
|
+ BOOL rflag, lflag, xflag, cflag, vflag, sflag, dflag, pflag, qflag, mflag;
|
||||||
|
struct List* files, *rtfiles;
|
||||||
|
char *devname, *dirname;
|
||||||
|
- char strbuf[80];
|
||||||
|
unsigned char *extbuf;
|
||||||
|
- int vInd, dInd, fInd, aInd;
|
||||||
|
BOOL nextArg;
|
||||||
|
|
||||||
|
struct Device *dev;
|
||||||
|
@@ -389,8 +390,7 @@
|
||||||
|
exit(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
- rflag = lflag = cflag = vflag = sflag = dflag = pflag = qflag = FALSE;
|
||||||
|
- vInd = dInd = fInd = aInd = -1;
|
||||||
|
+ rflag = lflag = cflag = vflag = sflag = dflag = pflag = qflag = mflag = FALSE;
|
||||||
|
xflag = TRUE;
|
||||||
|
dirname = NULL;
|
||||||
|
devname = NULL;
|
||||||
|
@@ -430,6 +430,9 @@
|
||||||
|
case 's':
|
||||||
|
sflag = TRUE;
|
||||||
|
break;
|
||||||
|
+ case 'm':
|
||||||
|
+ mflag = TRUE;
|
||||||
|
+ break;
|
||||||
|
case 'c':
|
||||||
|
cflag = TRUE;
|
||||||
|
break;
|
||||||
|
@@ -522,13 +525,13 @@
|
||||||
|
if (!rflag) {
|
||||||
|
cell = list = adfGetDirEnt(vol,vol->curDirPtr);
|
||||||
|
while(cell) {
|
||||||
|
- printEnt(vol,cell->content,"", sflag);
|
||||||
|
+ printEnt(vol,cell->content,"", sflag, mflag);
|
||||||
|
cell = cell->next;
|
||||||
|
}
|
||||||
|
adfFreeDirList(list);
|
||||||
|
} else {
|
||||||
|
cell = list = adfGetRDirEnt(vol,vol->curDirPtr,TRUE);
|
||||||
|
- printTree(vol,cell,"", sflag);
|
||||||
|
+ printTree(vol,cell,"", sflag, mflag);
|
||||||
|
adfFreeDirList(list);
|
||||||
|
}
|
||||||
|
}else if (xflag) {
|
||||||
|
diff -ur unadf-0.7.11a.orig/Demo/unadf.usage unadf-0.7.11a/Demo/unadf.usage
|
||||||
|
--- unadf-0.7.11a.orig/Demo/unadf.usage 2006-12-03 15:27:00.000000000 +0100
|
||||||
|
+++ unadf-0.7.11a/Demo/unadf.usage 2013-05-12 17:40:23.116966854 +0200
|
||||||
|
@@ -3,6 +3,7 @@
|
||||||
|
-r : lists directory tree contents
|
||||||
|
-c : use dircache data (must be used with -l)
|
||||||
|
-s : display entries logical block pointer (must be used with -l)
|
||||||
|
+ -m : display file comments, if exists (must be used with -l)
|
||||||
|
|
||||||
|
-v n : mount volume #n instead of default #0 volume
|
||||||
|
|
||||||
187
uadf
Executable file
187
uadf
Executable file
@@ -0,0 +1,187 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
UADF Virtual filesystem
|
||||||
|
|
||||||
|
This extfs provides quick and dirty read-only access to disk image files for
|
||||||
|
the Commodore Amiga adf or adz (gzipped adfs) and dms.
|
||||||
|
|
||||||
|
It requires the unadf utility, unfortunately there is no original sources,
|
||||||
|
since the author's page doesn't exist anymore. Luckily, there is a copy of the
|
||||||
|
source (and useful patches) in Debian repository:
|
||||||
|
http://packages.debian.org/sid/unadf
|
||||||
|
|
||||||
|
There should be one change made to the source of unadf, though. While using
|
||||||
|
"-lr" switch it by default also displays comments, separated by the comma.
|
||||||
|
However there is no way to distinguish where filename ends and comment starts,
|
||||||
|
if comment or filename already contains any comma.
|
||||||
|
|
||||||
|
The patched sources are available from: https://github.com/lclevy/ADFlib
|
||||||
|
|
||||||
|
It also requires xdms utility, for optional dms support.
|
||||||
|
|
||||||
|
Changelog:
|
||||||
|
1.3 Switch to Python3
|
||||||
|
1.2 Added failsafe for filenames in archive with spaces and nodos message.
|
||||||
|
1.1 Moved common code into extfslib library
|
||||||
|
1.0 Initial release
|
||||||
|
|
||||||
|
Author: Roman 'gryf' Dobosz <gryf73@gmail.com>
|
||||||
|
Date: 2019-06-30
|
||||||
|
Version: 1.3
|
||||||
|
Licence: BSD
|
||||||
|
"""
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import re
|
||||||
|
import os
|
||||||
|
import gzip
|
||||||
|
from subprocess import check_output, check_call, CalledProcessError
|
||||||
|
from tempfile import mkstemp, mkdtemp
|
||||||
|
import shutil
|
||||||
|
|
||||||
|
from extfslib import Archive, parse_args
|
||||||
|
|
||||||
|
|
||||||
|
class UAdf(Archive):
    """Interface between MC's extfs subsystem and the ``unadf`` utility for
    Amiga adf/adz/dms floppy disk images.

    (The previous docstring mentioned the c1541 program -- copy-paste
    residue from a d64 plugin; this class drives ``unadf`` and, for dms
    images, ``xdms``.)
    """

    # One line of `unadf -lr` output: optional size (absent for
    # directories), date, time and the file path.  Raw bytes literals keep
    # \s/\d as regex escapes instead of deprecated string escapes.
    LINE_PAT = re.compile(rb'\s*(?P<size>\d+)?'
                          rb'\s{2}(?P<date>\d{4}/\d{2}/\d{2})'
                          rb'\s{2}\s?(?P<time>\d+:\d{2}:\d{2})'
                          rb'\s{2}(?P<fpath>.*)')
    ARCHIVER = b"unadf"
    # Decompressor used to turn .dms images into plain .adf files.
    DMS = b"xdms"
    CMDS = {"list": b"-lr",
            "read": b"r",
            "write": b"w",
            "delete": b"d"}
    # month-day-year hour:minute template for extfs listing output.
    DATETIME = b"%s-%s-%s %02d:%s"

    def __init__(self, fname):
        """Prepare archive content for operations.  For .adz and .dms
        images a temporary plain .adf file is created first."""
        self._clean = True
        self._arch = fname

        if fname.lower().endswith(".adz"):
            self._ungzip()

        if fname.lower().endswith(".dms"):
            self._undms()

        super(UAdf, self).__init__(self._arch)

    def __del__(self):
        """Cleanup: remove the temporary adf file, if one was created."""
        if not self._clean:
            try:
                os.unlink(self._arch)
            except OSError:
                pass

    def _parse_dt(self, date, time):
        """Return parsed datetime which fulfills extfs standards date."""
        year, month, day = date.split(b"/")
        hours, minutes, _unused = time.split(b":")
        return self.DATETIME % (month, day, year, int(hours), minutes)

    def _ungzip(self):
        """Create a temporary file for the ungzipped adf, since unadf does
        not accept gzipped content in any way, including stdin."""
        fdesc, tmp_fname = mkstemp(suffix=".adf")
        os.close(fdesc)

        with gzip.open(self._arch) as gobj:
            with open(tmp_fname, "wb") as fobj:
                fobj.write(gobj.read())
        self._arch = tmp_fname
        self._clean = False

    def _undms(self):
        """Create a temporary adf file extracted from dms via xdms."""
        fdesc, tmp_fname = mkstemp(suffix=".adf")
        os.close(fdesc)

        try:
            check_call([self.DMS, b'-q', b'u', self._arch, "+" + tmp_fname])
            self._arch = tmp_fname
            self._clean = False
        except (CalledProcessError, OSError):
            # Best-effort: fall back to the original path; listing will
            # simply come up empty for an unreadable image.
            pass

    def _get_dir(self):
        """Retrieve the directory listing of the image.  Returns a list of
        entry dicts (empty when unadf fails, e.g. on a no-dos image)."""
        contents = []
        with open(os.devnull, "w") as fnull:
            try:
                out = check_output([self.ARCHIVER, self.CMDS['list'],
                                    self._arch], stderr=fnull)
            except CalledProcessError:
                return contents

        for line in out.split(b"\n"):
            match = self.LINE_PAT.match(line)
            if not match:
                continue

            match_entry = match.groupdict()
            entry = {}
            for key in match_entry:
                entry[bytes(key, 'utf-8')] = match_entry[key]
            del match_entry

            entry[b'perms'] = b"-rw-r--r--"
            if not entry[b'size']:
                # No size column means the entry is a directory.
                entry[b'perms'] = b"drwxr-xr-x"
                entry[b'size'] = b"0"
            entry[b'display_name'] = self._map_name(entry[b'fpath'])
            entry[b'datetime'] = self._parse_dt(entry[b'date'], entry[b'time'])
            entry[b'uid'] = bytes(str(self._uid), 'utf-8')
            entry[b'gid'] = bytes(str(self._gid), 'utf-8')
            contents.append(entry)

        return contents

    def list(self):
        """
        Output list contents of adf image.
        Convert filenames to be Unix filesystem friendly
        Add suffix to show user what kind of file do he dealing with.
        """
        if not self._contents:
            sys.stderr.write("Nodos or archive error\n")
            return 1

        for entry in self._contents:
            sys.stdout.buffer.write(self.ITEM % entry)
        return 0

    def copyout(self, src, dst):
        """Copy a file from the adf image into *dst* on the filesystem."""
        real_src = self._get_real_name(src)
        if not real_src:
            raise IOError("No such file or directory")

        if b" " in real_src:
            sys.stderr.write("unadf is unable to operate on filepath with "
                             "space inside.\nUse affs to mount image and than"
                             " extract desired files.\n")
            return 1

        extract_dir = mkdtemp()
        cmd = [self.ARCHIVER, self._arch, real_src, b"-d", extract_dir]
        # BUGFIX: check_call() raises CalledProcessError on nonzero exit,
        # so the former `!= 0` comparison was unreachable and the temporary
        # directory plus the two devnull handles leaked on failure.
        try:
            with open(os.devnull, 'wb') as fnull:
                check_call(cmd, stdout=fnull, stderr=fnull)
        except CalledProcessError:
            shutil.rmtree(extract_dir)
            sys.stderr.write("unadf returned with nonzero exit code\n")
            return 1

        shutil.move(os.path.join(bytes(extract_dir, "utf8"), real_src),
                    bytes(dst, "utf8"))
        shutil.rmtree(extract_dir)

        return 0
||||||
|
|
||||||
|
|
||||||
|
# Script entry point: let extfslib dispatch the MC extfs command-line
# arguments (list/copyout/...) to the UAdf handler and propagate its
# return code as the process exit status.
if __name__ == "__main__":
    sys.exit(parse_args(UAdf))
|
||||||
275
ulha
275
ulha
@@ -1,166 +1,97 @@
|
|||||||
#! /usr/bin/env python
|
#!/usr/bin/env python3
|
||||||
"""
|
"""
|
||||||
Lha Virtual filesystem executive for Midnight Commander.
|
Lha Virtual filesystem executive for Midnight Commander.
|
||||||
|
|
||||||
Tested against python 2.7, lha[1] 1.14 and mc 4.8.7
|
Tested against python 3.6, lha[1] 1.14 and mc 4.8.22
|
||||||
|
|
||||||
[1] http://lha.sourceforge.jp
|
[1] http://lha.sourceforge.jp
|
||||||
|
|
||||||
Changelog:
|
Changelog:
|
||||||
|
1.3 Switch to python3
|
||||||
|
1.2 Moved item pattern to extfslib module
|
||||||
|
1.1 Moved common code into extfslib library
|
||||||
1.0 Initial release
|
1.0 Initial release
|
||||||
|
|
||||||
Author: Roman 'gryf' Dobosz <gryf73@gmail.com>
|
Author: Roman 'gryf' Dobosz <gryf73@gmail.com>
|
||||||
Date: 2013-05-05
|
Date: 2019-06-30
|
||||||
Version: 1.0
|
Version: 1.3
|
||||||
Licence: BSD
|
Licence: BSD
|
||||||
"""
|
"""
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
import re
|
import re
|
||||||
import shutil
|
import shutil
|
||||||
from subprocess import call, check_call, check_output, CalledProcessError
|
from subprocess import call, check_call, CalledProcessError
|
||||||
from tempfile import mkdtemp, mkstemp
|
from tempfile import mkdtemp, mkstemp
|
||||||
|
|
||||||
|
from extfslib import Archive, parse_args
|
||||||
# Define which archiver you are using with appropriate options
|
|
||||||
ARCHIVER = "lha"
|
|
||||||
CMDS = {"list": "lq",
|
|
||||||
"read": "pq",
|
|
||||||
"write": "aq",
|
|
||||||
"delete": "dq"}
|
|
||||||
|
|
||||||
LINE_LHD = re.compile("^(?P<perms>[d-][rswx-]{9})"
|
|
||||||
"\s+(?P<uid>\d+)/"
|
|
||||||
"(?P<gid>\d+)"
|
|
||||||
"\s+(?P<size>\d+)"
|
|
||||||
"\s+(\*{6}|\d+\.\d%)"
|
|
||||||
"\s(?P<month>[JFMASOND][a-z]{2})\s+" # month
|
|
||||||
"(?P<day>\d+)\s+" # day
|
|
||||||
"(?P<yh>\d{4}|(\d{2}:\d{2}))" # year/hour
|
|
||||||
"\s(?P<fpath>.*)")
|
|
||||||
|
|
||||||
LINE_LHx = re.compile("^(?P<perms>(\[generic\])|(\[unknown\])|([d-][rswx-]{9}))"
|
|
||||||
"\s+(?P<size>\d+)"
|
|
||||||
"\s+(\*{6}|\d+\.\d%)"
|
|
||||||
"\s(?P<month>[JFMASOND][a-z]{2})\s+" # month
|
|
||||||
"(?P<day>\d+)\s+" # day
|
|
||||||
"(?P<yh>\d{4}|(\d{2}:\d{2}))" # year/hour
|
|
||||||
"\s(?P<fpath>.*)")
|
|
||||||
|
|
||||||
|
|
||||||
class Archive(object):
|
class ULha(Archive):
|
||||||
"""Archive handle. Provides interface to MC's extfs subsystem"""
|
"""Archive handle. Provides interface to MC's extfs subsystem"""
|
||||||
def __init__(self, fname):
|
|
||||||
"""Prepare archive content for operations"""
|
|
||||||
self._filemap = {}
|
|
||||||
self._arch = fname
|
|
||||||
self._pattern = None
|
|
||||||
self._uid = str(os.getuid())
|
|
||||||
self._gid = str(os.getgid())
|
|
||||||
|
|
||||||
self._contents = self._get_dir()
|
LINE_PAT = re.compile(b"^((?P<perms>[d-][rswx-]{9})|(\[generic\])|"
|
||||||
|
b"(\[unknown\]))"
|
||||||
def _identify(self):
|
b"((\s+\d+/\d+\s+)|(\s+))"
|
||||||
"""Check for lha header"""
|
b"(?P<uid>)(?P<gid>)" # just for the record
|
||||||
pat_map = {"-lhd-": LINE_LHD,
|
b"(?P<size>\d+)"
|
||||||
"-lh0-": LINE_LHx,
|
b"\s+(\*{6}|\d+\.\d%)"
|
||||||
"-lh1-": LINE_LHx,
|
b"\s(?P<month>[JFMASOND][a-z]{2})\s+" # month
|
||||||
"-lh5-": LINE_LHx,
|
b"(?P<day>\d+)\s+" # day
|
||||||
"-lh6-": LINE_LHx}
|
b"(?P<yh>\d{4}|(\d{2}:\d{2}))" # year/hour
|
||||||
fobj = open(self._arch)
|
b"\s(?P<fpath>.*)")
|
||||||
fobj.seek(2)
|
ARCHIVER = b"lha"
|
||||||
ident = fobj.read(5)
|
CMDS = {"list": b"lq",
|
||||||
fobj.close()
|
"read": b"pq",
|
||||||
return pat_map[ident]
|
"write": b"aq",
|
||||||
|
"delete": b"dq"}
|
||||||
def _map_name(self, name):
|
DATETIME = b"%(month)s %(day)s %(yh)s"
|
||||||
"""MC still have a bug in extfs subsystem, in case of filepaths with
|
|
||||||
leading space. This is workaround to this bug, which replaces leading
|
|
||||||
space with tilda. Real name is remembered in _filemap attribute and
|
|
||||||
used in real operations."""
|
|
||||||
if name.startswith(" "):
|
|
||||||
new_name = "".join(["~", name[1:]])
|
|
||||||
self._filemap[new_name] = name
|
|
||||||
return new_name
|
|
||||||
return name
|
|
||||||
|
|
||||||
def _get_real_name(self, name):
|
|
||||||
"""Get real filepath of the file. See _map_name docstring for
|
|
||||||
details."""
|
|
||||||
new_name = self._filemap.get(name)
|
|
||||||
if new_name:
|
|
||||||
return new_name
|
|
||||||
return name
|
|
||||||
|
|
||||||
def _get_dir(self):
|
def _get_dir(self):
|
||||||
"""Prepare archive file listing"""
|
"""Prepare archive file listing"""
|
||||||
if not self._pattern:
|
|
||||||
self._pattern = self._identify()
|
|
||||||
|
|
||||||
self._filemap = {}
|
|
||||||
contents = []
|
contents = []
|
||||||
|
|
||||||
if self._pattern == LINE_LHx:
|
|
||||||
perms = "-rw-r--r--"
|
|
||||||
|
|
||||||
out = self._call_command("list")
|
out = self._call_command("list")
|
||||||
if not out:
|
if not out:
|
||||||
return
|
return
|
||||||
|
|
||||||
for line in out.split("\n"):
|
for line in out.split(b"\n"):
|
||||||
if line.endswith("/"):
|
# -lhd- can store empty directories
|
||||||
|
perms = b"-rw-r--r--"
|
||||||
|
if line.endswith(bytes(os.path.sep, 'utf-8')):
|
||||||
line = line[:-1]
|
line = line[:-1]
|
||||||
if self._pattern == LINE_LHx:
|
perms = b"drw-r--r--"
|
||||||
perms = "drw-r--r--"
|
|
||||||
|
|
||||||
match = self._pattern.match(line)
|
match = self.LINE_PAT.match(line)
|
||||||
if not match:
|
if not match:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
entry = match.groupdict()
|
match_entry = match.groupdict()
|
||||||
|
entry = {}
|
||||||
|
for key in match_entry:
|
||||||
|
entry[bytes(key, 'utf-8')] = match_entry[key]
|
||||||
|
del match_entry
|
||||||
# UID and GID sometimes can have strange values depending on
|
# UID and GID sometimes can have strange values depending on
|
||||||
# the information that was written into archive. Most of the
|
# the information that was written into archive. Most of the
|
||||||
# times I was dealing with Amiga lha archives, so that i don't
|
# times I was dealing with Amiga lha archives, so that i don't
|
||||||
# really care about real user/group
|
# really care about real user/group
|
||||||
entry['uid'] = self._uid
|
|
||||||
entry['gid'] = self._gid
|
|
||||||
|
|
||||||
if self._pattern == LINE_LHx:
|
entry[b'uid'] = bytes(str(self._uid), 'utf-8')
|
||||||
entry['perms'] = perms
|
entry[b'gid'] = bytes(str(self._gid), 'utf-8')
|
||||||
|
entry[b'datetime'] = self.DATETIME % entry
|
||||||
|
|
||||||
entry['display_name'] = self._map_name(entry['fpath'])
|
if not entry[b'perms']:
|
||||||
|
entry[b'perms'] = perms
|
||||||
|
|
||||||
|
entry[b'display_name'] = self._map_name(entry[b'fpath'])
|
||||||
contents.append(entry)
|
contents.append(entry)
|
||||||
|
|
||||||
return contents
|
return contents
|
||||||
|
|
||||||
def _call_command(self, cmd, src=None, dst=None):
|
|
||||||
"""
|
|
||||||
Return status of the provided command, which can be one of:
|
|
||||||
write
|
|
||||||
read
|
|
||||||
delete
|
|
||||||
list
|
|
||||||
"""
|
|
||||||
command = [ARCHIVER, CMDS.get(cmd), self._arch]
|
|
||||||
|
|
||||||
if src and dst:
|
|
||||||
command.append(src)
|
|
||||||
command.append(dst)
|
|
||||||
elif src or dst:
|
|
||||||
command.append(src and src or dst)
|
|
||||||
|
|
||||||
try:
|
|
||||||
output = check_output(command)
|
|
||||||
except CalledProcessError:
|
|
||||||
return None
|
|
||||||
return output
|
|
||||||
|
|
||||||
|
|
||||||
def list(self):
|
def list(self):
|
||||||
"""Output contents of the archive to stdout"""
|
"""Output contents of the archive to stdout"""
|
||||||
for entry in self._contents:
|
for entry in self._contents:
|
||||||
sys.stdout.write("%(perms)s 1 %(uid)-8s %(gid)-8s %(size)8s "
|
sys.stdout.buffer.write(self.ITEM % entry)
|
||||||
"%(month)s %(day)s %(yh)s %(display_name)s\n" %
|
|
||||||
entry)
|
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
def rm(self, dst):
|
def rm(self, dst):
|
||||||
@@ -172,6 +103,17 @@ class Archive(object):
|
|||||||
return 1
|
return 1
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
|
def rmdir(self, dst):
|
||||||
|
"""Remove empty directory"""
|
||||||
|
dst = self._get_real_name(dst)
|
||||||
|
|
||||||
|
if not dst.endswith(bytes(os.path.sep, 'utf-8')):
|
||||||
|
dst += bytes(os.path.sep, 'utf-8')
|
||||||
|
|
||||||
|
if self._call_command('delete', dst=dst) is None:
|
||||||
|
return 1
|
||||||
|
return 0
|
||||||
|
|
||||||
def run(self, dst):
|
def run(self, dst):
|
||||||
"""Execute file out of archive"""
|
"""Execute file out of archive"""
|
||||||
fdesc, tmp_file = mkstemp()
|
fdesc, tmp_file = mkstemp()
|
||||||
@@ -207,12 +149,14 @@ class Archive(object):
|
|||||||
os.chdir(tmpdir)
|
os.chdir(tmpdir)
|
||||||
if src:
|
if src:
|
||||||
os.makedirs(os.path.dirname(dst))
|
os.makedirs(os.path.dirname(dst))
|
||||||
os.link(src, dst)
|
shutil.copy2(src, dst)
|
||||||
else:
|
else:
|
||||||
os.makedirs(dst)
|
os.makedirs(dst)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
result = check_call([ARCHIVER, CMDS["write"], arch_abspath, dst])
|
result = check_call([self.ARCHIVER.decode('utf-8'),
|
||||||
|
self.CMDS["write"].decode('utf-8'),
|
||||||
|
arch_abspath, dst])
|
||||||
except CalledProcessError:
|
except CalledProcessError:
|
||||||
return 1
|
return 1
|
||||||
finally:
|
finally:
|
||||||
@@ -225,105 +169,14 @@ class Archive(object):
|
|||||||
src = self._get_real_name(src)
|
src = self._get_real_name(src)
|
||||||
fobj = open(dst, "wb")
|
fobj = open(dst, "wb")
|
||||||
try:
|
try:
|
||||||
result = check_call([ARCHIVER, CMDS['read'], self._arch, src],
|
result = check_call([self.ARCHIVER, self.CMDS['read'], self._arch,
|
||||||
stdout=fobj)
|
src], stdout=fobj)
|
||||||
except CalledProcessError:
|
except CalledProcessError:
|
||||||
return 1
|
return 1
|
||||||
finally:
|
finally:
|
||||||
fobj.close()
|
fobj.close()
|
||||||
return result
|
return result
|
||||||
|
|
||||||
CALL_MAP = {'list': lambda a: Archive(a.arch).list(),
|
|
||||||
'copyin': lambda a: Archive(a.arch).copyin(a.src, a.dst),
|
|
||||||
'copyout': lambda a: Archive(a.arch).copyout(a.src, a.dst),
|
|
||||||
'mkdir': lambda a: Archive(a.arch).mkdir(a.dst),
|
|
||||||
'rm': lambda a: Archive(a.arch).rm(a.dst),
|
|
||||||
'run': lambda a: Archive(a.arch).run(a.dst)}
|
|
||||||
|
|
||||||
|
|
||||||
def parse_args():
|
|
||||||
"""Use ArgumentParser to check for script arguments and execute."""
|
|
||||||
parser = ArgumentParser()
|
|
||||||
subparsers = parser.add_subparsers(help='supported commands')
|
|
||||||
parser_list = subparsers.add_parser('list', help="List contents of "
|
|
||||||
"archive")
|
|
||||||
parser_copyin = subparsers.add_parser('copyin', help="Copy file into "
|
|
||||||
"archive")
|
|
||||||
parser_copyout = subparsers.add_parser('copyout', help="Copy file out of "
|
|
||||||
"archive")
|
|
||||||
parser_rm = subparsers.add_parser('rm', help="Delete file in archive")
|
|
||||||
parser_mkdir = subparsers.add_parser('mkdir', help="Create directory in "
|
|
||||||
"archive")
|
|
||||||
parser_run = subparsers.add_parser('run', help="Execute archived file")
|
|
||||||
|
|
||||||
parser_list.add_argument('arch', help="archive filename")
|
|
||||||
parser_list.set_defaults(func=CALL_MAP['list'])
|
|
||||||
|
|
||||||
parser_copyin.add_argument('arch', help="archive filename")
|
|
||||||
parser_copyin.add_argument('src', help="source filename")
|
|
||||||
parser_copyin.add_argument('dst', help="destination filename (to be "
|
|
||||||
"written into archive)")
|
|
||||||
parser_copyin.set_defaults(func=CALL_MAP['copyin'])
|
|
||||||
|
|
||||||
parser_copyout.add_argument('arch', help="archive filename")
|
|
||||||
parser_copyout.add_argument('src', help="source filename (to be read from"
|
|
||||||
" archive")
|
|
||||||
parser_copyout.add_argument('dst', help="destination filename")
|
|
||||||
parser_copyout.set_defaults(func=CALL_MAP['copyout'])
|
|
||||||
|
|
||||||
parser_rm.add_argument('arch', help="archive filename")
|
|
||||||
parser_rm.add_argument('dst', help="File inside archive to be deleted")
|
|
||||||
parser_rm.set_defaults(func=CALL_MAP['rm'])
|
|
||||||
|
|
||||||
parser_mkdir.add_argument('arch', help="archive filename")
|
|
||||||
parser_mkdir.add_argument('dst', help="Directory name inside archive to "
|
|
||||||
"be created")
|
|
||||||
parser_mkdir.set_defaults(func=CALL_MAP['mkdir'])
|
|
||||||
|
|
||||||
parser_run.add_argument('arch', help="archive filename")
|
|
||||||
parser_run.add_argument('dst', help="File to be executed")
|
|
||||||
parser_run.set_defaults(func=CALL_MAP['run'])
|
|
||||||
|
|
||||||
args = parser.parse_args()
|
|
||||||
return args.func(args)
|
|
||||||
|
|
||||||
|
|
||||||
def no_parse():
|
|
||||||
"""Failsafe argument "parsing". Note, that it blindly takes positional
|
|
||||||
arguments without checking them. In case of wrong arguments it will
|
|
||||||
silently exit"""
|
|
||||||
try:
|
|
||||||
if sys.argv[1] not in ('list', 'copyin', 'copyout', 'rm', 'mkdir',
|
|
||||||
"run"):
|
|
||||||
sys.exit(2)
|
|
||||||
except IndexError:
|
|
||||||
sys.exit(2)
|
|
||||||
|
|
||||||
class Arg(object):
|
|
||||||
"""Mimic argparse object"""
|
|
||||||
dst = None
|
|
||||||
src = None
|
|
||||||
arch = None
|
|
||||||
|
|
||||||
arg = Arg()
|
|
||||||
|
|
||||||
try:
|
|
||||||
arg.arch = sys.argv[2]
|
|
||||||
if sys.argv[1] in ('copyin', 'copyout'):
|
|
||||||
arg.src = sys.argv[3]
|
|
||||||
arg.dst = sys.argv[4]
|
|
||||||
elif sys.argv[1] in ('rm', 'run', 'mkdir'):
|
|
||||||
arg.dst = sys.argv[3]
|
|
||||||
except IndexError:
|
|
||||||
sys.exit(2)
|
|
||||||
|
|
||||||
return CALL_MAP[sys.argv[1]](arg)
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
try:
|
sys.exit(parse_args(ULha))
|
||||||
from argparse import ArgumentParser
|
|
||||||
PARSE_FUNC = parse_args
|
|
||||||
except ImportError:
|
|
||||||
PARSE_FUNC = no_parse
|
|
||||||
|
|
||||||
sys.exit(PARSE_FUNC())
|
|
||||||
|
|||||||
135
ulzx
Executable file
135
ulzx
Executable file
@@ -0,0 +1,135 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
Read only, Amiga LZX[1] archiver Virtual filesystem executive for Midnight
|
||||||
|
Commander.
|
||||||
|
|
||||||
|
Tested against python 3.6, unlzx[1] 1.1 and mc 4.8.22
|
||||||
|
|
||||||
|
[1] ftp://us.aminet.net/pub/aminet/misc/unix/unlzx.c.gz.readme
|
||||||
|
|
||||||
|
Changelog:
|
||||||
|
1.2 Use python3
|
||||||
|
1.1 Moved common code into extfslib library
|
||||||
|
1.0 Initial release
|
||||||
|
|
||||||
|
Author: Roman 'gryf' Dobosz <gryf73@gmail.com>
|
||||||
|
Date: 2019-06-30
|
||||||
|
Version: 1.2
|
||||||
|
Licence: BSD
|
||||||
|
"""
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import re
|
||||||
|
import shutil
|
||||||
|
from subprocess import call, CalledProcessError
|
||||||
|
from tempfile import mkdtemp, mkstemp
|
||||||
|
|
||||||
|
from extfslib import Archive, parse_args
|
||||||
|
|
||||||
|
|
||||||
|
class ULzx(Archive):
|
||||||
|
"""Archive handle. Provides interface to MC's extfs subsystem"""
|
||||||
|
LINE_PAT = re.compile(b"^\s+(?P<size>\d+)\s+"
|
||||||
|
b"((n/a)|\d+)\s"
|
||||||
|
b"(?P<time>\d{2}:\d{2}:\d{2})\s+"
|
||||||
|
b"(?P<date>\d+-[a-z]{3}-\d{4})\s"
|
||||||
|
b"(?P<perms>[h-][s-][p-][a-][r-][w-][e-][d-])\s"
|
||||||
|
b"\"(?P<fpath>.*)\"")
|
||||||
|
ARCHIVER = b"unlzx"
|
||||||
|
CMDS = {"list": b"-v",
|
||||||
|
"read": b"-x"}
|
||||||
|
DATETIME = b"%02d-%02d-%s %02d:%02d"
|
||||||
|
|
||||||
|
def _get_date(self, time, date):
|
||||||
|
"""Return MM-DD-YYYY hh:mm formatted date out of time and date
|
||||||
|
strings"""
|
||||||
|
month_list = [b"jan", b"feb", b"mar", b"apr", b"may", b"jun", b"jul",
|
||||||
|
b"aug", b"sep", b"oct", b"nov", b"dec"]
|
||||||
|
day, month, year = date.split(b"-")
|
||||||
|
month = month_list.index(month) + 1
|
||||||
|
hours, minutes, dummy = time.split(b":")
|
||||||
|
return self.DATETIME % (month, int(day), year, int(hours),
|
||||||
|
int(minutes))
|
||||||
|
|
||||||
|
def _get_dir(self):
|
||||||
|
"""Prepare archive file listing"""
|
||||||
|
contents = []
|
||||||
|
|
||||||
|
out = self._call_command("list")
|
||||||
|
if not out:
|
||||||
|
return
|
||||||
|
|
||||||
|
for line in out.split(b"\n"):
|
||||||
|
match = self.LINE_PAT.match(line)
|
||||||
|
if not match:
|
||||||
|
continue
|
||||||
|
|
||||||
|
match_entry = match.groupdict()
|
||||||
|
entry = {}
|
||||||
|
for key in match_entry:
|
||||||
|
entry[bytes(key, 'utf-8')] = match_entry[key]
|
||||||
|
del match_entry
|
||||||
|
|
||||||
|
entry[b'datetime'] = self._get_date(entry[b'time'], entry[b'date'])
|
||||||
|
entry[b'display_name'] = self._map_name(entry[b'fpath'])
|
||||||
|
entry[b'perms'] = b"-rw-r--r--" # lzx doesn't store empty dirs
|
||||||
|
entry[b'uid'] = bytes(str(self._uid), 'utf-8')
|
||||||
|
entry[b'gid'] = bytes(str(self._gid), 'utf-8')
|
||||||
|
contents.append(entry)
|
||||||
|
|
||||||
|
return contents
|
||||||
|
|
||||||
|
def list(self):
|
||||||
|
"""Output contents of the archive to stdout"""
|
||||||
|
for entry in self._contents:
|
||||||
|
sys.stdout.buffer.write(self.ITEM % entry)
|
||||||
|
return 0
|
||||||
|
|
||||||
|
def run(self, dst):
|
||||||
|
"""Execute file out of archive"""
|
||||||
|
fdesc, tmp_file = mkstemp()
|
||||||
|
os.close(fdesc)
|
||||||
|
result = 0
|
||||||
|
|
||||||
|
if self.copyout(dst, tmp_file) != 0:
|
||||||
|
result = 1
|
||||||
|
|
||||||
|
os.chmod(tmp_file, int("700", 8))
|
||||||
|
|
||||||
|
try:
|
||||||
|
result = call([tmp_file])
|
||||||
|
finally:
|
||||||
|
try:
|
||||||
|
os.unlink(tmp_file)
|
||||||
|
except OSError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
def copyout(self, src, dst):
|
||||||
|
"""Unfortunately, to copy one file out entire LZX archive have to be
|
||||||
|
extracted. For small archives is not a problem, but in relatively big
|
||||||
|
one it could be a performance issue."""
|
||||||
|
tmp_dir = mkdtemp()
|
||||||
|
src = self._get_real_name(src)
|
||||||
|
current_dir = os.path.abspath(os.curdir)
|
||||||
|
os.chdir(tmp_dir)
|
||||||
|
|
||||||
|
try:
|
||||||
|
with open(os.devnull, "w") as fnull:
|
||||||
|
result = call([self.ARCHIVER, self.CMDS['read'],
|
||||||
|
os.path.join(current_dir, self._arch)],
|
||||||
|
stdout=fnull, stderr=fnull)
|
||||||
|
if result == 0:
|
||||||
|
shutil.copy2(src, dst)
|
||||||
|
except CalledProcessError:
|
||||||
|
return 1
|
||||||
|
finally:
|
||||||
|
shutil.rmtree(tmp_dir)
|
||||||
|
os.chdir(current_dir)
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
sys.exit(parse_args(ULzx))
|
||||||
Reference in New Issue
Block a user