diff CSP2/CSP2_env/env-d9b9114564458d9d-741b3de822f2aaca6c6caa4325c4afce/include/kj/async-inl.h @ 69:33d812a61356

planemo upload commit 2e9511a184a1ca667c7be0c6321a36dc4e3d116d
author jpayne
date Tue, 18 Mar 2025 17:55:14 -0400
parents
children
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/CSP2/CSP2_env/env-d9b9114564458d9d-741b3de822f2aaca6c6caa4325c4afce/include/kj/async-inl.h	Tue Mar 18 17:55:14 2025 -0400
@@ -0,0 +1,2345 @@
+// Copyright (c) 2013-2014 Sandstorm Development Group, Inc. and contributors
+// Licensed under the MIT License:
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+// This file contains extended inline implementation details that are required along with async.h.
+// We move this all into a separate file to make async.h more readable.
+//
+// Non-inline declarations here are defined in async.c++.
+
+#pragma once
+
+#ifndef KJ_ASYNC_H_INCLUDED
+#error "Do not include this directly; include kj/async.h."
+#include "async.h"  // help IDE parse this file
+#endif
+
+#if _MSC_VER && KJ_HAS_COROUTINE
+#include <intrin.h>
+#endif
+
+#include <kj/list.h>
+
+KJ_BEGIN_HEADER
+
+namespace kj {
+namespace _ {  // private
+
+template <typename T>
+class ExceptionOr;
+
+class ExceptionOrValue {
+public:
+  ExceptionOrValue(bool, Exception&& exception): exception(kj::mv(exception)) {}
+  KJ_DISALLOW_COPY(ExceptionOrValue);
+
+  void addException(Exception&& exception) {
+    if (this->exception == nullptr) {
+      this->exception = kj::mv(exception);
+    }
+  }
+
+  template <typename T>
+  ExceptionOr<T>& as() { return *static_cast<ExceptionOr<T>*>(this); }
+  template <typename T>
+  const ExceptionOr<T>& as() const { return *static_cast<const ExceptionOr<T>*>(this); }
+
+  Maybe<Exception> exception;
+
+protected:
+  // Allow subclasses to have move constructor / assignment.
+  ExceptionOrValue() = default;
+  ExceptionOrValue(ExceptionOrValue&& other) = default;
+  ExceptionOrValue& operator=(ExceptionOrValue&& other) = default;
+};
+
+template <typename T>
+class ExceptionOr: public ExceptionOrValue {
+public:
+  ExceptionOr() = default;
+  ExceptionOr(T&& value): value(kj::mv(value)) {}
+  ExceptionOr(bool, Exception&& exception): ExceptionOrValue(false, kj::mv(exception)) {}
+  ExceptionOr(ExceptionOr&&) = default;
+  ExceptionOr& operator=(ExceptionOr&&) = default;
+
+  Maybe<T> value;
+};
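+
+// As a rough sketch of how this type-erased/typed split gets used (the real call sites are the
+// PromiseNode subclasses below):
+//
+//     ExceptionOr<int> storage;
+//     ExceptionOrValue& erased = storage;   // pass around without knowing T
+//     erased.as<int>().value = 123;         // recover the typed view to store a result
+//     erased.addException(KJ_EXCEPTION(FAILED, "oops"));  // kept only if no exception set yet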
+
+template <typename T>
+inline T convertToReturn(ExceptionOr<T>&& result) {
+  KJ_IF_MAYBE(value, result.value) {
+    KJ_IF_MAYBE(exception, result.exception) {
+      throwRecoverableException(kj::mv(*exception));
+    }
+    return _::returnMaybeVoid(kj::mv(*value));
+  } else KJ_IF_MAYBE(exception, result.exception) {
+    throwFatalException(kj::mv(*exception));
+  } else {
+    // Result contained neither a value nor an exception?
+    KJ_UNREACHABLE;
+  }
+}
+
+inline void convertToReturn(ExceptionOr<Void>&& result) {
+  // Override <void> case to use throwRecoverableException().
+
+  if (result.value != nullptr) {
+    KJ_IF_MAYBE(exception, result.exception) {
+      throwRecoverableException(kj::mv(*exception));
+    }
+  } else KJ_IF_MAYBE(exception, result.exception) {
+    throwRecoverableException(kj::mv(*exception));
+  } else {
+    // Result contained neither a value nor an exception?
+    KJ_UNREACHABLE;
+  }
+}
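+
+// For orientation, roughly how these helpers are used when a ready promise is unwrapped (a
+// sketch only; the real logic lives in Promise::wait() and friends):
+//
+//     ExceptionOr<int> result;
+//     node->get(result);                            // node must already be ready
+//     int value = convertToReturn(kj::mv(result));  // throws if an exception was recorded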
+
+class TraceBuilder {
+  // Helper for methods that build a call trace.
+public:
+  TraceBuilder(ArrayPtr<void*> space)
+      : start(space.begin()), current(space.begin()), limit(space.end()) {}
+
+  inline void add(void* addr) {
+    if (current < limit) {
+      *current++ = addr;
+    }
+  }
+
+  inline bool full() const { return current == limit; }
+
+  ArrayPtr<void*> finish() {
+    return arrayPtr(start, current);
+  }
+
+  String toString();
+
+private:
+  void** start;
+  void** current;
+  void** limit;
+};
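+
+// Traces are typically collected into fixed stack space, since trace collection must not
+// allocate. For example (illustrative; `returnAddress` stands in for whatever address a node
+// wants to record):
+//
+//     void* space[32];
+//     TraceBuilder builder(kj::arrayPtr(space, kj::size(space)));
+//     builder.add(returnAddress);
+//     if (!builder.full()) { /* keep tracing deeper */ }
+//     kj::String text = builder.toString();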
+
+struct alignas(void*) PromiseArena {
+  // Space in which a chain of promises may be allocated. See PromiseDisposer.
+  byte bytes[1024];
+};
+
+class Event: private AsyncObject {
+  // An event waiting to be executed.  Not for direct use by applications -- promises use this
+  // internally.
+
+public:
+  Event(SourceLocation location);
+  Event(kj::EventLoop& loop, SourceLocation location);
+  ~Event() noexcept(false);
+  KJ_DISALLOW_COPY_AND_MOVE(Event);
+
+  void armDepthFirst();
+  // Enqueue this event so that `fire()` will be called from the event loop soon.
+  //
+  // Events scheduled in this way are executed in depth-first order:  if an event callback arms
+  // more events, those events are placed at the front of the queue (in the order in which they
+  // were armed), so that they run immediately after the first event's callback returns.
+  //
+  // Depth-first event scheduling is appropriate for events that represent simple continuations
+  // of a previous event that should be globbed together for performance.  Depth-first scheduling
+  // can lead to starvation, so any long-running task must occasionally yield with
+  // `armBreadthFirst()`.  (Promise::then() uses depth-first whereas evalLater() uses
+  // breadth-first.)
+  //
+  // To use breadth-first scheduling instead, use `armBreadthFirst()`.
+
+  void armBreadthFirst();
+  // Like `armDepthFirst()` except that the event is placed at the end of the queue.
+
+  void armLast();
+  // Enqueues this event to happen after all other events have run to completion and there is
+  // really nothing left to do except wait for I/O.
+
+  bool isNext();
+  // True if the Event has been armed and is next in line to be fired. This can be used after
+// calling PromiseNode::onReady(event) to determine if a promise being waited upon is immediately
+  // ready, in which case continuations may be optimistically run without returning to the event
+  // loop. Note that this optimization is only valid if we know that we would otherwise immediately
+  // return to the event loop without running more application code. So this turns out to be useful
+  // in fairly narrow circumstances, chiefly when a coroutine is about to suspend, but discovers it
+  // doesn't need to.
+  //
+  // Returns false if the event loop is not currently running. This ensures that promise
+  // continuations don't execute except under a call to .wait().
+
+  void disarm();
+  // If the event is armed but hasn't fired, cancel it. (Destroying the event does this
+  // implicitly.)
+
+  virtual void traceEvent(TraceBuilder& builder) = 0;
+  // Build a trace of the callers leading up to this event. `builder` will be populated with
+  // "return addresses" of the promise chain waiting on this event. The return addresses may
+// actually be the addresses of lambdas passed to .then(), but in any case, feeding them into
+  // addr2line should produce useful source code locations.
+  //
+  // `traceEvent()` may be called from an async signal handler while `fire()` is executing. It
+  // must not allocate nor take locks.
+
+  String traceEvent();
+  // Helper that builds a trace and stringifies it.
+
+protected:
+  virtual Maybe<Own<Event>> fire() = 0;
+  // Fire the event.  Possibly returns a pointer to itself, which will be discarded by the
+  // caller.  This is the only way that an event can delete itself as a result of firing, as
+  // doing so from within fire() will throw an exception.
+
+private:
+  friend class kj::EventLoop;
+  EventLoop& loop;
+  Event* next;
+  Event** prev;
+  bool firing = false;
+
+  static constexpr uint MAGIC_LIVE_VALUE = 0x1e366381u;
+  uint live = MAGIC_LIVE_VALUE;
+  SourceLocation location;
+};
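+
+// To illustrate the contract (purely a sketch -- applications never subclass Event directly;
+// the promise machinery below does):
+//
+//     class MyEvent final: public Event {
+//     public:
+//       MyEvent(SourceLocation location): Event(location) {}
+//       void traceEvent(TraceBuilder& builder) override {}  // nothing useful to record
+//     protected:
+//       Maybe<Own<Event>> fire() override {
+//         // ... do whatever work this event represents ...
+//         return nullptr;  // we don't own ourselves, so there is nothing to hand back
+//       }
+//     };
+//
+// Under a running EventLoop, `armDepthFirst()` queues such an event ahead of breadth-first
+// events; `disarm()` (or destruction) cancels it if it hasn't fired yet.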
+
+class PromiseArenaMember {
+  // An object that is allocated in a PromiseArena. `PromiseNode` inherits this, and most
+  // arena-allocated objects are `PromiseNode` subclasses, but `TaskSet::Task`, ForkHub, and
+  // potentially other objects that commonly live on the end of a promise chain can also leverage
+  // this.
+
+public:
+  virtual void destroy() = 0;
+  // Destroys and frees the node.
+  //
+  // If the node was allocated using allocPromise<T>(), then destroy() must call
+  // freePromise<T>(this). If it was allocated some other way, then it is `destroy()`'s
+  // responsibility to complete any necessary cleanup of memory, e.g. call `delete this`.
+  //
+  // We use this instead of a virtual destructor for two reasons:
+  // 1. Coroutine nodes are not independent objects, they have to call destroy() on the coroutine
+  //    handle to delete themselves.
+  // 2. XThreadEvents sometimes leave it up to a different thread to actually delete the object.
+
+private:
+  PromiseArena* arena = nullptr;
+  // If non-null, then this PromiseNode is the last node allocated within the given arena, and
+  // therefore owns the arena. After this node is destroyed, the arena should be deleted.
+  //
+  // PromiseNodes are allocated within the arena starting from the end, and `PromiseNode`s
+  // allocated this way are required to have `PromiseNode` itself as their leftmost inherited type,
+  // so that the pointers match. Thus, the space in `arena` from its start to the location of the
+  // `PromiseNode` is known to be available for subsequent allocations (which should then take
+  // ownership of the arena).
+
+  friend class PromiseDisposer;
+};
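+
+// A rough picture of the layout described above, with two nodes in a single arena (sizes not
+// to scale):
+//
+//     | ...free space...              | NodeB | NodeA |
+//     ^ arena start                                   ^ arena end (start + 1024 bytes)
+//
+// NodeA was allocated first, flush against the end of the arena; NodeB was appended immediately
+// before it. Only the most recently allocated node (NodeB) holds a non-null `arena` pointer, so
+// disposing it (after the rest of the chain is destroyed) is what frees the whole block.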
+
+class PromiseNode: public PromiseArenaMember, private AsyncObject {
+  // A Promise<T> contains a chain of PromiseNodes tracking the pending transformations.
+  //
+  // To reduce generated code bloat, PromiseNode is not a template.  Instead, it makes very hacky
+  // use of pointers to ExceptionOrValue which actually point to ExceptionOr<T>, but are only
+  // so down-cast in the few places that really need to be templated.  Luckily this is all
+  // internal implementation details.
+
+public:
+  virtual void onReady(Event* event) noexcept = 0;
+  // Arms the given event when ready.
+  //
+  // May be called multiple times. If called again before the event was armed, the old event will
+  // never be armed, only the new one. If called again after the event was armed, the new event
+  // will be armed immediately. Can be called with nullptr to un-register the existing event.
+
+  virtual void setSelfPointer(OwnPromiseNode* selfPtr) noexcept;
+  // Tells the node that `selfPtr` is the pointer that owns this node, and will continue to own
+  // this node until it is destroyed or setSelfPointer() is called again.  ChainPromiseNode uses
+  // this to shorten redundant chains.  The default implementation does nothing; only
+  // ChainPromiseNode should implement this.
+
+  virtual void get(ExceptionOrValue& output) noexcept = 0;
+  // Get the result.  `output` points to an ExceptionOr<T> into which the result will be written.
+  // Can only be called once, and only after the node is ready.  Must be called directly from the
+  // event loop, with no application code on the stack.
+
+  virtual void tracePromise(TraceBuilder& builder, bool stopAtNextEvent) = 0;
+  // Build a trace of this promise chain, showing what it is currently waiting on.
+  //
+  // Since traces are ordered callee-before-caller, PromiseNode::tracePromise() should typically
+  // recurse to its child first, then after the child returns, add itself to the trace.
+  //
+  // If `stopAtNextEvent` is true, then the trace should stop as soon as it hits a PromiseNode that
+  // also implements Event, and should not trace that node or its children. This is used in
+  // conjunction with Event::traceEvent(). The chain of Events is often more sparse than the chain
+  // of PromiseNodes, because a TransformPromiseNode (which implements .then()) is not itself an
+  // Event. TransformPromiseNode instead tells its child node to directly notify its *parent* node
+  // when it is ready, and then TransformPromiseNode applies the .then() transformation during the
+  // call to .get().
+  //
+// So, when we trace the chain of Events backwards, we end up hopping over segments of
+  // TransformPromiseNodes (and other similar types). In order to get those added to the trace,
+  // each Event must call back down the PromiseNode chain in the opposite direction, using this
+  // method.
+  //
+  // `tracePromise()` may be called from an async signal handler while `get()` is executing. It
+  // must not allocate nor take locks.
+
+  template <typename T>
+  static OwnPromiseNode from(T&& promise) {
+    // Given a Promise, extract the PromiseNode.
+    return kj::mv(promise.node);
+  }
+  template <typename T>
+  static PromiseNode& from(T& promise) {
+    // Given a Promise, extract the PromiseNode.
+    return *promise.node;
+  }
+  template <typename T>
+  static T to(OwnPromiseNode&& node) {
+    // Construct a Promise from a PromiseNode. (T should be a Promise type.)
+    return T(false, kj::mv(node));
+  }
+
+protected:
+  class OnReadyEvent {
+    // Helper class for implementing onReady().
+
+  public:
+    void init(Event* newEvent);
+
+    void arm();
+    void armBreadthFirst();
+    // Arms the event if init() has already been called and makes future calls to init()
+    // automatically arm the event.
+
+    inline void traceEvent(TraceBuilder& builder) {
+      if (event != nullptr && !builder.full()) event->traceEvent(builder);
+    }
+
+  private:
+    Event* event = nullptr;
+  };
+};
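+
+// The from()/to() helpers above are how the public Promise<T> wrapper and the private node chain
+// are converted into one another. Roughly (promise names are illustrative):
+//
+//     OwnPromiseNode node = PromiseNode::from(kj::mv(somePromise));  // steal the node
+//     PromiseNode& ref = PromiseNode::from(somePromise);             // borrow it
+//     auto wrapped = PromiseNode::to<Promise<int>>(kj::mv(node));    // wrap a node back up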
+
+class PromiseDisposer {
+public:
+  template <typename T>
+  static constexpr bool canArenaAllocate() {
+    // We can only use arena allocation for types that fit in an arena and have pointer-size
+    // alignment. Anything else will need to be allocated as a separate heap object.
+    return sizeof(T) <= sizeof(PromiseArena) && alignof(T) <= alignof(void*);
+  }
+
+  static void dispose(PromiseArenaMember* node) {
+    PromiseArena* arena = node->arena;
+    node->destroy();
+    delete arena;  // reminder: `delete` automatically ignores null pointers
+  }
+
+  template <typename T, typename D = PromiseDisposer, typename... Params>
+  static kj::Own<T, D> alloc(Params&&... params) noexcept {
+    // Implements allocPromise().
+    T* ptr;
+    if (!canArenaAllocate<T>()) {
+      // Node too big (or needs weird alignment), fall back to regular heap allocation.
+      ptr = new T(kj::fwd<Params>(params)...);
+    } else {
+      // Start a new arena.
+      //
+      // NOTE: As in append() (below), we don't implement exception-safety because it causes code
+      //   bloat and these constructors probably don't throw. Instead this function is noexcept, so
+      //   if a constructor does throw, it'll crash rather than leak memory.
+      auto* arena = new PromiseArena;
+      ptr = reinterpret_cast<T*>(arena + 1) - 1;
+      ctor(*ptr, kj::fwd<Params>(params)...);
+      ptr->arena = arena;
+      KJ_IREQUIRE(reinterpret_cast<void*>(ptr) ==
+                  reinterpret_cast<void*>(static_cast<PromiseArenaMember*>(ptr)),
+          "PromiseArenaMember must be the leftmost inherited type.");
+    }
+    return kj::Own<T, D>(ptr);
+  }
+
+  template <typename T, typename D = PromiseDisposer, typename... Params>
+  static kj::Own<T, D> append(
+      OwnPromiseNode&& next, Params&&... params) noexcept {
+    // Implements appendPromise().
+
+    PromiseArena* arena = next->arena;
+
+    if (!canArenaAllocate<T>() || arena == nullptr ||
+        reinterpret_cast<byte*>(next.get()) - reinterpret_cast<byte*>(arena) < sizeof(T)) {
+      // No arena available, or not enough space, or weird alignment needed. Start new arena.
+      return alloc<T, D>(kj::mv(next), kj::fwd<Params>(params)...);
+    } else {
+      // Append to arena.
+      //
+      // NOTE: When we call ctor(), it takes ownership of `next`, so we shouldn't assume `next`
+      //   still exists after it returns. So we have to remove ownership of the arena before that.
+      //   In theory if we wanted this to be exception-safe, we'd also have to arrange to delete
+      //   the arena if the constructor throws. However, in practice none of the PromiseNode
+      //   constructors throw, so we just mark the whole method noexcept in order to avoid the
+      //   code bloat to handle this case.
+      next->arena = nullptr;
+      T* ptr = reinterpret_cast<T*>(next.get()) - 1;
+      ctor(*ptr, kj::mv(next), kj::fwd<Params>(params)...);
+      ptr->arena = arena;
+      KJ_IREQUIRE(reinterpret_cast<void*>(ptr) ==
+                  reinterpret_cast<void*>(static_cast<PromiseArenaMember*>(ptr)),
+          "PromiseArenaMember must be the leftmost inherited type.");
+      return kj::Own<T, D>(ptr);
+    }
+  }
+};
+
+template <typename T, typename... Params>
+static kj::Own<T, PromiseDisposer> allocPromise(Params&&... params) {
+  // Allocate a PromiseNode without appending it to any existing promise arena. Space for a new
+  // arena will be allocated.
+  return PromiseDisposer::alloc<T>(kj::fwd<Params>(params)...);
+}
+
+template <typename T, bool arena = PromiseDisposer::canArenaAllocate<T>()>
+struct FreePromiseNode;
+template <typename T>
+struct FreePromiseNode<T, true> {
+  static inline void free(T* ptr) {
+    // The object will have been allocated in an arena, so we only want to run the destructor.
+    // The arena's memory will be freed separately.
+    kj::dtor(*ptr);
+  }
+};
+template <typename T>
+struct FreePromiseNode<T, false> {
+  static inline void free(T* ptr) {
+    // The object will have been allocated separately on the heap.
+    return delete ptr;
+  }
+};
+
+template <typename T>
+static void freePromise(T* ptr) {
+  // Free a PromiseNode originally allocated using `allocPromise<T>()`. The implementation of
+  // PromiseNode::destroy() must call this for any type that is allocated using allocPromise().
+  FreePromiseNode<T>::free(ptr);
+}
+
+template <typename T, typename... Params>
+static kj::Own<T, PromiseDisposer> appendPromise(OwnPromiseNode&& next, Params&&... params) {
+  // Append a promise to the arena that currently ends with `next`. `next` is also still passed as
+  // the first parameter to the new object's constructor.
+  //
+  // This is semantically the same as `allocPromise()` except that it may avoid the underlying
+  // memory allocation. `next` must end up being destroyed before the new object (i.e. the new
+  // object must never transfer away ownership of `next`).
+  return PromiseDisposer::append<T>(kj::mv(next), kj::fwd<Params>(params)...);
+}
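+
+// Putting the allocation helpers together (a sketch; `MyNode` is a hypothetical PromiseNode
+// subclass whose constructor takes (OwnPromiseNode&&, int) and whose destroy() calls
+// freePromise(this)):
+//
+//     OwnPromiseNode inner = allocPromise<MyNode>(readyNow(), 1);      // starts a fresh arena
+//     OwnPromiseNode outer = appendPromise<MyNode>(kj::mv(inner), 2);  // reuses it if it fits
+//
+// Disposing `outer` then destroys the chain and frees the shared arena in one shot.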
+
+// -------------------------------------------------------------------
+
+inline ReadyNow::operator Promise<void>() const {
+  return PromiseNode::to<Promise<void>>(readyNow());
+}
+
+template <typename T>
+inline NeverDone::operator Promise<T>() const {
+  return PromiseNode::to<Promise<T>>(neverDone());
+}
+
+// -------------------------------------------------------------------
+
+class ImmediatePromiseNodeBase: public PromiseNode {
+public:
+  ImmediatePromiseNodeBase();
+  ~ImmediatePromiseNodeBase() noexcept(false);
+
+  void onReady(Event* event) noexcept override;
+  void tracePromise(TraceBuilder& builder, bool stopAtNextEvent) override;
+};
+
+template <typename T>
+class ImmediatePromiseNode final: public ImmediatePromiseNodeBase {
+  // A promise that has already been resolved to an immediate value or exception.
+
+public:
+  ImmediatePromiseNode(ExceptionOr<T>&& result): result(kj::mv(result)) {}
+  void destroy() override { freePromise(this); }
+
+  void get(ExceptionOrValue& output) noexcept override {
+    output.as<T>() = kj::mv(result);
+  }
+
+private:
+  ExceptionOr<T> result;
+};
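+
+// This is roughly the node behind a promise that is constructed from an already-available value
+// (a sketch; the real Promise constructors are defined elsewhere in this file):
+//
+//     OwnPromiseNode node = allocPromise<ImmediatePromiseNode<int>>(ExceptionOr<int>(123));
+//     auto promise = PromiseNode::to<Promise<int>>(kj::mv(node));  // already resolved to 123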
+
+class ImmediateBrokenPromiseNode final: public ImmediatePromiseNodeBase {
+public:
+  ImmediateBrokenPromiseNode(Exception&& exception);
+  void destroy() override;
+
+  void get(ExceptionOrValue& output) noexcept override;
+
+private:
+  Exception exception;
+};
+
+template <typename T, T value>
+class ConstPromiseNode: public ImmediatePromiseNodeBase {
+public:
+  void destroy() override {}
+  void get(ExceptionOrValue& output) noexcept override {
+    output.as<T>() = value;
+  }
+};
+
+// -------------------------------------------------------------------
+
+class AttachmentPromiseNodeBase: public PromiseNode {
+public:
+  AttachmentPromiseNodeBase(OwnPromiseNode&& dependency);
+
+  void onReady(Event* event) noexcept override;
+  void get(ExceptionOrValue& output) noexcept override;
+  void tracePromise(TraceBuilder& builder, bool stopAtNextEvent) override;
+
+private:
+  OwnPromiseNode dependency;
+
+  void dropDependency();
+
+  template <typename>
+  friend class AttachmentPromiseNode;
+};
+
+template <typename Attachment>
+class AttachmentPromiseNode final: public AttachmentPromiseNodeBase {
+  // A PromiseNode that holds on to some object (usually, an Own<T>, but could be any movable
+  // object) until the promise resolves.
+
+public:
+  AttachmentPromiseNode(OwnPromiseNode&& dependency, Attachment&& attachment)
+      : AttachmentPromiseNodeBase(kj::mv(dependency)),
+        attachment(kj::mv<Attachment>(attachment)) {}
+  void destroy() override { freePromise(this); }
+
+  ~AttachmentPromiseNode() noexcept(false) {
+    // We need to make sure the dependency is deleted before we delete the attachment because the
+    // dependency may be using the attachment.
+    dropDependency();
+  }
+
+private:
+  Attachment attachment;
+};
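+
+// This node is what keeps attached objects alive for the lifetime of a promise chain. The
+// pattern, roughly (a sketch, not the exact implementation of Promise::attach(); `Foo` is a
+// hypothetical resource type):
+//
+//     Own<Foo> foo = getFoo();
+//     OwnPromiseNode node = PromiseNode::from(kj::mv(promise));
+//     node = appendPromise<AttachmentPromiseNode<Own<Foo>>>(kj::mv(node), kj::mv(foo));
+//     // `foo` is now destroyed only after the dependency node is destroyed.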
+
+// -------------------------------------------------------------------
+
+#if __GNUC__ >= 8 && !__clang__
+// GCC 8's class-memaccess warning rightly does not like the memcpy()'s below, but there's no
+// "legal" way for us to extract the content of a PTMF so too bad.
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wclass-memaccess"
+#if __GNUC__ >= 11
+// GCC 11's array-bounds is similarly upset with us for digging into "private" implementation
+// details. But the format is well-defined by the ABI which cannot change so please just let us
+// do it kthx.
+#pragma GCC diagnostic ignored "-Warray-bounds"
+#endif
+#endif
+
+template <typename T, typename ReturnType, typename... ParamTypes>
+void* getMethodStartAddress(T& obj, ReturnType (T::*method)(ParamTypes...));
+template <typename T, typename ReturnType, typename... ParamTypes>
+void* getMethodStartAddress(const T& obj, ReturnType (T::*method)(ParamTypes...) const);
+// Given an object and a pointer-to-method, return the start address of the method's code. The
+// intent is that this address can be used in a trace; addr2line should map it to the start of
+// the function's definition. For virtual methods, this does a vtable lookup on `obj` to determine
+// the address of the specific implementation (otherwise, `obj` wouldn't be needed).
+//
+// Note that if the method is overloaded or is a template, you will need to explicitly specify
+// the param and return types, otherwise the compiler won't know which overload / template
+// specialization you are requesting.
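+
+// For example (`MyClass` and its methods are hypothetical):
+//
+//     void* addr = getMethodStartAddress(obj, &MyClass::run);
+//     // For an overloaded or templated method, spell out the template arguments:
+//     void* addr2 = getMethodStartAddress<MyClass, void, int>(obj, &MyClass::run);
+//
+// The resulting addresses can be fed to a TraceBuilder and later to addr2line.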
+
+class PtmfHelper {
+  // This class is a private helper for GetFunctorStartAddress and getMethodStartAddress(). The
+  // class represents the internal representation of a pointer-to-member-function.
+
+  template <typename... ParamTypes>
+  friend struct GetFunctorStartAddress;
+  template <typename T, typename ReturnType, typename... ParamTypes>
+  friend void* getMethodStartAddress(T& obj, ReturnType (T::*method)(ParamTypes...));
+  template <typename T, typename ReturnType, typename... ParamTypes>
+  friend void* getMethodStartAddress(const T& obj, ReturnType (T::*method)(ParamTypes...) const);
+
+#if __GNUG__
+
+  void* ptr;
+  ptrdiff_t adj;
+  // Layout of a pointer-to-member-function used by GCC and compatible compilers.
+
+  void* apply(const void* obj) {
+#if defined(__arm__) || defined(__mips__) || defined(__aarch64__)
+    if (adj & 1) {
+      ptrdiff_t voff = (ptrdiff_t)ptr;
+#else
+    ptrdiff_t voff = (ptrdiff_t)ptr;
+    if (voff & 1) {
+      voff &= ~1;
+#endif
+      return *(void**)(*(char**)obj + voff);
+    } else {
+      return ptr;