[pypy-svn] r49396 - pypy/branch/pypy-interp-file/module/_file
arigo at codespeak.net
arigo at codespeak.net
Wed Dec 5 19:21:54 CET 2007
Author: arigo
Date: Wed Dec 5 19:21:54 2007
New Revision: 49396
Added:
pypy/branch/pypy-interp-file/module/_file/interp_file.py (contents, props changed)
Log:
Forgot this file.
Added: pypy/branch/pypy-interp-file/module/_file/interp_file.py
==============================================================================
--- (empty file)
+++ pypy/branch/pypy-interp-file/module/_file/interp_file.py Wed Dec 5 19:21:54 2007
@@ -0,0 +1,311 @@
+import py
+from pypy.rlib import streamio
+from pypy.module._file.interp_stream import W_AbstractStream
+from pypy.module._file.interp_stream import StreamErrors, wrap_streamerror
+from pypy.interpreter.error import OperationError
+from pypy.interpreter.gateway import ObjSpace, W_Root, Arguments
+from pypy.interpreter.typedef import TypeDef
+from pypy.interpreter.gateway import interp2app
+
+
+class W_File(W_AbstractStream):
+ """An interp-level file object. This implements the same interface than
+ the app-level files, with the following differences:
+
+ * method names are prefixed with 'file_'
+ * the 'normal' app-level constructor is implemented by file___init__().
+ * the methods with the 'direct_' prefix should be used if the caller
+ locks and unlocks the file itself, and takes care of StreamErrors.
+ """
+
+ # Default values until the file is successfully opened
+ stream = None
+ name = "<uninitialized file>"
+ mode = "<uninitialized file>"
+ encoding = None
+ fd = -1
+
+ def __init__(self, space):
+ self.space = space
+
+ def __del__(self):
+ # assume that the file and stream objects are only visible in the
+ # thread that runs __del__, so no race condition should be possible
+ if self.stream is not None:
+ self.direct_close()
+
+ def fdopenstream(self, stream, fd, mode, name):
+ self.fd = fd
+ self.name = name
+ self.softspace = 0 # Required according to file object docs
+ self.encoding = None # This is not used internally by file objects
+ self.mode = mode
+ self.stream = stream
+ if stream.flushable():
+ getopenstreams(self.space)[stream] = None
+
+ def check_mode_ok(self, mode):
+ if not mode or mode[0] not in ['r', 'w', 'a', 'U']:
+ space = self.space
+ raise OperationError(space.w_IOError,
+ space.wrap('invalid mode : "%s"' % mode))
+
+ def getstream(self):
+ """Return self.stream or raise an app-level ValueError if missing
+ (i.e. if the file is closed)."""
+ stream = self.stream
+ if stream is None:
+ space = self.space
+ raise OperationError(space.w_ValueError,
+ space.wrap('I/O operation on closed file'))
+ return stream
+
+ # ____________________________________________________________
+ #
+ # The 'direct_' methods assume that the caller already acquired the
+ # file lock. They don't convert StreamErrors to OperationErrors, too.
+
+ def direct___init__(self, name, mode='r', buffering=-1):
+ self.direct_close()
+ self.check_mode_ok(mode)
+ stream = streamio.open_file_as_stream(name, mode, buffering)
+ fd = stream.try_to_find_file_descriptor()
+ self.fdopenstream(stream, fd, mode, name)
+
+ def direct_fdopen(self, fd, mode='r', buffering=-1):
+ self.direct_close()
+ self.check_mode_ok(mode)
+ stream = streamio.fdopen_as_stream(fd, mode, buffering)
+ self.fdopenstream(stream, fd, mode, '<fdopen>')
+
+ def direct_close(self):
+ space = self.space
+ stream = self.stream
+ if stream is not None:
+ self.stream = None
+ openstreams = getopenstreams(self.space)
+ try:
+ del openstreams[stream]
+ except KeyError:
+ pass
+ stream.close()
+
+ def direct_read(self, n=-1):
+ stream = self.getstream()
+ if n < 0:
+ return stream.readall()
+ else:
+ result = []
+ while n > 0:
+ data = stream.read(n)
+ if not data:
+ break
+ n -= len(data)
+ result.append(data)
+ return ''.join(result)
+
+ def direct_readline(self, size=-1):
+ stream = self.getstream()
+ if size < 0:
+ return stream.readline()
+ else:
+ # very inefficient unless there is a peek()
+ result = []
+ while size > 0:
+ # "peeks" on the underlying stream to see how many chars
+ # we can safely read without reading past an end-of-line
+ peeked = stream.peek()
+ pn = peeked.find("\n", 0, size)
+ if pn < 0:
+ pn = min(size-1, len(peeked))
+ c = stream.read(pn + 1)
+ if not c:
+ break
+ result.append(c)
+ if c.endswith('\n'):
+ break
+ size -= len(c)
+ return ''.join(result)
+
+ def direct_readlines(self, size=0):
+ stream = self.getstream()
+ # NB. this implementation is very inefficient for unbuffered
+ # streams, but ok if stream.readline() is efficient.
+ if size <= 0:
+ result = []
+ while True:
+ line = stream.readline()
+ if not line:
+ break
+ result.append(line)
+ size -= len(line)
+ else:
+ result = []
+ while size > 0:
+ line = stream.readline()
+ if not line:
+ break
+ result.append(line)
+ size -= len(line)
+ return result
+
+ def direct_write(self, data):
+ self.getstream().write(data)
+
+ # ____________________________________________________________
+ #
+ # The 'file_' methods are the one exposed to app-level.
+
+ _exposed_method_names = []
+ _exposed_classmethod_names = []
+
+ def _decl(class_scope, name, unwrap_spec, docstring,
+ as_classmethod=False, wrapresult="space.wrap(result)"):
+ # hack hack to build a wrapper around the direct_xxx methods.
+ # The wrapper adds lock/unlock calls and a space.wrap() on
+ # the result, conversion of stream errors to OperationErrors,
+ # and has the specified docstring and unwrap_spec.
+ direct_fn = class_scope['direct_' + name]
+ co = direct_fn.func_code
+ argnames = co.co_varnames[:co.co_argcount]
+ defaults = direct_fn.func_defaults or ()
+
+ args = []
+ for i, argname in enumerate(argnames):
+ try:
+ default = defaults[-len(argnames) + i]
+ except IndexError:
+ args.append(argname)
+ else:
+ args.append('%s=%r' % (argname, default))
+ sig = ', '.join(args)
+ assert argnames[0] == 'self'
+ callsig = ', '.join(argnames[1:])
+
+ src = py.code.Source("""
+ def file_%(name)s(%(sig)s):
+ %(docstring)r
+ space = self.space
+ self.lock()
+ try:
+ try:
+ result = self.direct_%(name)s(%(callsig)s)
+ except StreamErrors, e:
+ raise wrap_streamerror(space, e)
+ finally:
+ self.unlock()
+ return %(wrapresult)s
+ """ % locals())
+ exec str(src) in globals(), class_scope
+ class_scope['file_' + name].unwrap_spec = unwrap_spec
+ interp2app
+ if as_classmethod:
+ class_scope['_exposed_classmethod_names'].append(name)
+ else:
+ class_scope['_exposed_method_names'].append(name)
+
+
+ _decl(locals(), "__init__", ['self', str, str, int],
+ """Opens a file.""")
+
+ _decl(locals(), "close", ['self'],
+ """close() -> None or (perhaps) an integer. Close the file.
+
+Sets data attribute .closed to True. A closed file cannot be used for
+further I/O operations. close() may be called more than once without
+error. Some kinds of file objects (for example, opened by popen())
+may return an exit status upon closing.""")
+ # NB. close() need to use the stream lock to avoid double-closes or
+ # close-while-another-thread-uses-it.
+
+ _decl(locals(), "read", ['self', int],
+ """read([size]) -> read at most size bytes, returned as a string.
+
+If the size argument is negative or omitted, read until EOF is reached.
+Notice that when in non-blocking mode, less data than what was requested
+may be returned, even if no size parameter was given.""")
+
+ _decl(locals(), "readline", ['self', int],
+ """readlines([size]) -> list of strings, each a line from the file.
+
+Call readline() repeatedly and return a list of the lines so read.
+The optional size argument, if given, is an approximate bound on the
+total number of bytes in the lines returned.""")
+
+ _decl(locals(), "readlines", ['self', int],
+ """readlines([size]) -> list of strings, each a line from the file.
+
+Call readline() repeatedly and return a list of the lines so read.
+The optional size argument, if given, is an approximate bound on the
+total number of bytes in the lines returned.""",
+ wrapresult = "wrap_list_of_str(space, result)")
+
+ _decl(locals(), "write", ['self', str],
+ """write(str) -> None. Write string str to file.
+
+Note that due to buffering, flush() or close() may be needed before
+the file on disk reflects the data written.""")
+
+
+# ____________________________________________________________
+
+
def descr_file__new__(space, w_subtype, args):
    """Implement file.__new__: allocate a fresh, still-closed W_File
    instance of the requested (possibly subclassed) app-level type."""
    w_file = space.allocate_instance(W_File, w_subtype)
    W_File.__init__(w_file, space)
    return space.wrap(w_file)
descr_file__new__.unwrap_spec = [ObjSpace, W_Root, Arguments]
+
+def descr_file_fdopen(space, w_subtype, fd, mode='r', buffering=-1):
+ file = space.allocate_instance(W_File, w_subtype)
+ W_File.__init__(file, space)
+ try:
+ file.direct_fdopen(fd, mode, buffering)
+ except StreamErrors, e:
+ raise wrap_streamerror(space, e)
+ return space.wrap(file)
+descr_file_fdopen.unwrap_spec = [ObjSpace, W_Root, int, str, int]
+
+
# Build the app-level 'file' type object.  The instance methods are the
# 'file_' wrappers generated by _decl in the class body, exposed here under
# their plain names.
# NOTE(review): W_File._exposed_classmethod_names is populated by _decl but
# never consulted here -- 'fdopen' is wired up by hand instead; confirm
# whether that list is still needed.
W_File.typedef = TypeDef(
    "file",
    __doc__ = """file(name[, mode[, buffering]]) -> file object

Open a file. The mode can be 'r', 'w' or 'a' for reading (default),
writing or appending. The file will be created if it doesn't exist
when opened for writing or appending; it will be truncated when
opened for writing. Add a 'b' to the mode for binary files.
Add a '+' to the mode to allow simultaneous reading and writing.
If the buffering argument is given, 0 means unbuffered, 1 means line
buffered, and larger numbers specify the buffer size.
Add a 'U' to mode to open the file for input with universal newline
support. Any line ending in the input file will be seen as a '\n'
in Python. Also, a file so opened gains the attribute 'newlines';
the value for this attribute is one of None (no newline read yet),
'\r', '\n', '\r\n' or a tuple containing all the newline types seen.

Note: open() is an alias for file().
""",
    __new__ = interp2app(descr_file__new__),
    fdopen = interp2app(descr_file_fdopen, as_classmethod=True),
    **dict([(name, interp2app(getattr(W_File, 'file_' + name)))
            for name in W_File._exposed_method_names])
    )
+
+# ____________________________________________________________
+
# NOTE(review): 'is_mode_ok' and 'W_Stream' are neither defined nor imported
# in this file (only W_AbstractStream is imported), so calling this function
# as written would raise NameError.  It looks like leftover code copied from
# interp_stream -- confirm whether it should be removed or the missing names
# imported.
def fdopen_as_stream(space, fd, mode="r", buffering=-1):
    # Wrap an OS-level descriptor as an app-level W_Stream object.
    is_mode_ok(space, mode)
    return space.wrap(W_Stream(
        space, streamio.fdopen_as_stream(fd, mode, buffering)))
fdopen_as_stream.unwrap_spec = [ObjSpace, int, str, int]
+
def wrap_list_of_str(space, lst):
    """Wrap every string in 'lst' and return them as an app-level list."""
    wrapped = []
    for item in lst:
        wrapped.append(space.wrap(item))
    return space.newlist(wrapped)
+
class FileState:
    """Per-space state holding the table of currently-open streams.

    The table is a dict used as a set: keys are stream objects, values
    are always None (see W_File.fdopenstream / direct_close).
    """
    def __init__(self, space):
        # 'space' is required by the fromcache() protocol but unused here
        self.openstreams = {}
+
def getopenstreams(space):
    """Return the per-space dict of currently-open flushable streams."""
    state = space.fromcache(FileState)
    return state.openstreams
More information about the Pypy-commit
mailing list