* Copyright (C) 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
* 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 by Larry Wall and others
* You may distribute under the terms of either the GNU General Public
* License or the Artistic License, as specified in the README file.
* The fields of BASEOP are:
* op_next Pointer to next ppcode to execute after this one.
* (Top level pre-grafted op points to first op,
* but this is replaced when op is grafted in, when
* this op will point to the real next op, and the new
* parent takes over role of remembering starting op.)
* op_ppaddr Pointer to current ppcode's function.
* op_type The type of the operation.
* op_opt Whether or not the op has been optimised by the
* op_slabbed allocated via opslab
* op_static tell op_free() to skip PerlMemShared_free(), when
* op_savefree on savestack via SAVEFREEOP
* op_folded Result/remainder of a constant fold operation.
* op_moresib this op is not the last sibling
* op_flags Flags common to all operations. See OPf_* below.
* op_private Flags peculiar to a particular operation (BUT,
* by default, set to the number of children until
* the operation is privatized by a check routine,
* which may or may not check number of children).
#include "op_reg_common.h"
typedef PERL_BITFIELD16 Optype;
/* this field now either points to the next sibling or to the parent,
* depending on op_moresib. So rename it from op_sibling to op_sibparent.
# define _OP_SIBPARENT_FIELDNAME op_sibparent
# define _OP_SIBPARENT_FIELDNAME op_sibling
#define BASEOP BASEOP_DEFINITION
OP* _OP_SIBPARENT_FIELDNAME;\
OP* (*op_ppaddr)(pTHX); \
PERL_BITFIELD16 op_type:9; \
PERL_BITFIELD16 op_opt:1; \
PERL_BITFIELD16 op_slabbed:1; \
PERL_BITFIELD16 op_savefree:1; \
PERL_BITFIELD16 op_static:1; \
PERL_BITFIELD16 op_folded:1; \
PERL_BITFIELD16 op_moresib:1; \
PERL_BITFIELD16 op_spare:1; \
/* If op_type:9 is changed to :10, also change cx_pusheval()
Also, if the type of op_type is ever changed (e.g. to PERL_BITFIELD32)
then all the other bit-fields before/after it should change their
types too to let VC pack them into the same 4 byte integer.*/
/* for efficiency, requires OPf_WANT_VOID == G_VOID etc */
#define OP_GIMME(op,dfl) \
(((op)->op_flags & OPf_WANT) ? ((op)->op_flags & OPf_WANT) : dfl)
#define OP_GIMME_REVERSE(flags) ((flags) & G_WANT)
=for apidoc Amn|U32|GIMME_V
The XSUB-writer's equivalent to Perl's C<wantarray>. Returns C<G_VOID>,
C<G_SCALAR> or C<G_ARRAY> for void, scalar or list context,
respectively. See L<perlcall> for a usage example.
=for apidoc Amn|U32|GIMME
A backward-compatible version of C<GIMME_V> which can only return
C<G_SCALAR> or C<G_ARRAY>; in a void context, it returns C<G_SCALAR>.
Deprecated. Use C<GIMME_V> instead.
#define GIMME_V OP_GIMME(PL_op, block_gimme())
#define OPf_WANT 3 /* Mask for "want" bits: */
#define OPf_WANT_VOID 1 /* Want nothing */
#define OPf_WANT_SCALAR 2 /* Want single value */
#define OPf_WANT_LIST 3 /* Want list of any length */
#define OPf_KIDS 4 /* There is a firstborn child. */
#define OPf_PARENS 8 /* This operator was parenthesized. */
/* (Or block needs explicit scope entry.) */
#define OPf_REF 16 /* Certified reference. */
/* (Return container, not containee). */
#define OPf_MOD 32 /* Will modify (lvalue). */
#define OPf_STACKED 64 /* Some arg is arriving on the stack. */
#define OPf_SPECIAL 128 /* Do something weird for this op: */
/* On local LVAL, don't init local value. */
/* On OP_SORT, subroutine is inlined. */
/* On OP_NOT, inversion was implicit. */
/* On OP_LEAVE, don't restore curpm, e.g.
/* On truncate, we truncate filehandle */
/* On control verbs, we saw no label */
/* On flipflop, we saw ... instead of .. */
/* On UNOPs, saw bare parens, e.g. eof(). */
/* On OP_CHDIR, handle (or bare parens) */
/* On OP_NULL, saw a "do". */
/* On OP_EXISTS, treat av as av, not avhv. */
/* On OP_(ENTER|LEAVE)EVAL, don't clear $@ */
/* On regcomp, "use re 'eval'" was in scope */
/* On RV2[ACGHS]V, don't create GV--in
/* On OP_DBSTATE, indicates breakpoint
/* On OP_REQUIRE, was seen as CORE::require */
/* On OP_(ENTER|LEAVE)WHEN, there's
/* On OP_SMARTMATCH, an implicit smartmatch */
/* On OP_ANONHASH and OP_ANONLIST, create a
reference to the new anon hash or array */
/* On OP_HELEM, OP_MULTIDEREF and OP_HSLICE,
localization will be followed by assignment,
so do not wipe the target if it is special
(e.g. a glob or a magic SV) */
/* On OP_MATCH, OP_SUBST & OP_TRANS, the
operand of a logical or conditional
that was optimised away, so it should
/* On OP_CONST, from a constant CV */
/* On OP_GLOB, two meanings:
- Before ck_glob, called as CORE::glob
- After ck_glob, use Perl glob function
/* On OP_PADRANGE, push @_ */
/* On OP_DUMP, has no label */
/* On OP_UNSTACK, in a C-style for loop */
/* There is no room in op_flags for this one, so it has its own bit-
field member (op_folded) instead. The flag is only used to tell
op_convert_list to set op_folded. */
#define OPf_FOLDED (1<<16)
/* old names; don't use in new code, but don't break them, either */
#define OPf_LIST OPf_WANT_LIST
#define OPf_KNOW OPf_WANT
#if !defined(PERL_CORE) && !defined(PERL_EXT)
(PL_op->op_flags & OPf_WANT \
? ((PL_op->op_flags & OPf_WANT) == OPf_WANT_LIST \
/* NOTE: OPp* flags are now auto-generated and defined in opcode.h,
* from data in regen/op_private */
#define OPpTRANS_ALL (OPpTRANS_FROM_UTF|OPpTRANS_TO_UTF|OPpTRANS_IDENTICAL|OPpTRANS_SQUASH|OPpTRANS_COMPLEMENT|OPpTRANS_GROWS|OPpTRANS_DELETE)
/* Mask for OP_ENTERSUB flags, the absence of which must be propagated
#define OPpENTERSUB_LVAL_MASK (OPpLVAL_INTRO|OPpENTERSUB_INARGS)
/* things that can be elements of op_aux */
# define UNOP_AUX_item_sv(item) PAD_SVl((item)->pad_offset);
# define UNOP_AUX_item_sv(item) ((item)->sv);
/* op_u.op_first *must* be aligned the same as the op_first
* field of the other op types, and op_u.op_meth_sv *must*
* be aligned with op_sv */
OP* op_first; /* optree for method name */
SV* op_meth_sv; /* static method name */
PADOFFSET op_rclass_targ; /* pad index for redirect class */
SV* op_rclass_sv; /* static redirect class $o->A::meth() */
REGEXP * op_pmregexp; /* compiled expression */
OP * op_pmreplroot; /* For OP_SUBST */
PADOFFSET op_pmtargetoff; /* For OP_SPLIT lex ary or thr GV */
GV * op_pmtargetgv; /* For OP_SPLIT non-threaded GV */
OP * op_pmreplstart; /* Only used in OP_SUBST */
PADOFFSET op_pmstashoff; /* Only used in OP_MATCH, with PMf_ONCE set */
OP * op_code_list; /* list of (?{}) code blocks */
#define PM_GETRE(o) (SvTYPE(PL_regex_pad[(o)->op_pmoffset]) == SVt_REGEXP \
? (REGEXP*)(PL_regex_pad[(o)->op_pmoffset]) : NULL)
/* The assignment is just to enforce type safety (or at least get a warning). */
/* With first class regexps not via a reference one needs to assign
&PL_sv_undef under ithreads. (This would probably work unthreaded, but NULL
is cheaper. I guess we could allow NULL, but the check above would get
more complex, and we'd have an AV with (SV*)NULL in it, which feels bad */
/* BEWARE - something that calls this macro passes (r) which has a side
 * effect. */
#define PM_SETRE(o,r) STMT_START { \
REGEXP *const _pm_setre = (r); \
PL_regex_pad[(o)->op_pmoffset] = MUTABLE_SV(_pm_setre); \
#define PM_GETRE(o) ((o)->op_pmregexp)
#define PM_SETRE(o,r) ((o)->op_pmregexp = (r))
/* Currently these PMf flags occupy a single 32-bit word. Not all bits are
* currently used. The lower bits are shared with their corresponding RXf flag
* bits, up to but not including _RXf_PMf_SHIFT_NEXT. The unused bits
* immediately follow; finally the used Pmf-only (unshared) bits, so that the
* highest bit in the word is used. This gathers all the unused bits as a pool
* in the middle, like so: 11111111111111110000001111111111
* where the '1's represent used bits, and the '0's unused. This design allows
* us to allocate off one end of the pool if we need to add a shared bit, and
* off the other end if we need a non-shared bit, without disturbing the other
* bits. This maximizes the likelihood of being able to change things without
* breaking binary compatibility.
* To add shared bits, do so in op_reg_common.h. This should change
* _RXf_PMf_SHIFT_NEXT so that things won't compile. Then come to regexp.h and
* op.h and adjust the constant adders in the definitions of PMf_BASE_SHIFT and
* Pmf_BASE_SHIFT down by the number of shared bits you added. That's it.
* Things should be binary compatible. But if either of these gets to having
* to subtract rather than add, leave at 0 and adjust all the entries below
* that are in terms of this accordingly. But if the first one of those is
* already PMf_BASE_SHIFT+0, there are no bits left, and a redesign is in
* order.
* To remove unshared bits, just delete its entry. If you're where breaking
* binary compatibility is ok to do, you might want to adjust things to move
* the newly opened space so that it gets absorbed into the common pool.
* To add unshared bits, first use up any gaps in the middle. Otherwise,
* allocate off the low end until you get to PMf_BASE_SHIFT+0. If that isn't
* enough, move PMf_BASE_SHIFT down (if possible) and add the new bit at the
* other end instead; this preserves binary compatibility. */
#define PMf_BASE_SHIFT (_RXf_PMf_SHIFT_NEXT+2)
/* Set by the parser if it discovers an error, so the regex shouldn't be
#define PMf_HAS_ERROR (1U<<(PMf_BASE_SHIFT+4))
/* 'use re "taint"' in scope: taint $1 etc. if target tainted */
#define PMf_RETAINT (1U<<(PMf_BASE_SHIFT+5))
/* match successfully only once per reset, with related flag RXf_USED in
* re->extflags holding state. This is used only for ?? matches, and only on
#define PMf_ONCE (1U<<(PMf_BASE_SHIFT+6))
/* PMf_ONCE, i.e. ?pat?, has matched successfully. Not used under threading. */
#define PMf_USED (1U<<(PMf_BASE_SHIFT+7))
/* subst replacement is constant */
#define PMf_CONST (1U<<(PMf_BASE_SHIFT+8))
/* keep 1st runtime pattern forever */
#define PMf_KEEP (1U<<(PMf_BASE_SHIFT+9))
#define PMf_GLOBAL (1U<<(PMf_BASE_SHIFT+10)) /* pattern had a g modifier */
/* don't reset pos() if //g fails */
#define PMf_CONTINUE (1U<<(PMf_BASE_SHIFT+11))
/* evaluating replacement as expr */
#define PMf_EVAL (1U<<(PMf_BASE_SHIFT+12))
/* Return substituted string instead of modifying it. */
#define PMf_NONDESTRUCT (1U<<(PMf_BASE_SHIFT+13))
/* the pattern has a CV attached (currently only under qr/...(?{}).../) */
#define PMf_HAS_CV (1U<<(PMf_BASE_SHIFT+14))
/* op_code_list is private; don't free it etc. It may well point to
* code within another sub, with different pad etc */
#define PMf_CODELIST_PRIVATE (1U<<(PMf_BASE_SHIFT+15))
/* the PMOP is a QR (we should be able to detect that from the op type,
* but the regex compilation API passes just the pm flags, not the op
#define PMf_IS_QR (1U<<(PMf_BASE_SHIFT+16))
#define PMf_USE_RE_EVAL (1U<<(PMf_BASE_SHIFT+17)) /* use re'eval' in scope */
/* See comments at the beginning of these defines about adding bits. The
* highest bit position should be used, so that if PMf_BASE_SHIFT gets
* increased, the #error below will be triggered so that you will be reminded
* to adjust things at the other end to keep the bit positions unchanged */
#if PMf_BASE_SHIFT+17 > 31
# error Too many PMf_ bits used. See above and regnodes.h for any spare in middle
# define PmopSTASH(o) ((o)->op_pmflags & PMf_ONCE \
? PL_stashpad[(o)->op_pmstashstartu.op_pmstashoff] \
# define PmopSTASH_set(o,hv) \
(assert_((o)->op_pmflags & PMf_ONCE) \
(o)->op_pmstashstartu.op_pmstashoff = \
(hv) ? alloccopstash(hv) : 0)
(((o)->op_pmflags & PMf_ONCE) ? (o)->op_pmstashstartu.op_pmstash : NULL)
# if defined (DEBUGGING) && defined(__GNUC__) && !defined(PERL_GCC_BRACE_GROUPS_FORBIDDEN)
# define PmopSTASH_set(o,hv) ({ \
assert((o)->op_pmflags & PMf_ONCE); \
((o)->op_pmstashstartu.op_pmstash = (hv)); \
# define PmopSTASH_set(o,hv) ((o)->op_pmstashstartu.op_pmstash = (hv))
#define PmopSTASHPV(o) (PmopSTASH(o) ? HvNAME_get(PmopSTASH(o)) : NULL)
/* op_pmstashstartu.op_pmstash is not refcounted */
#define PmopSTASHPV_set(o,pv) PmopSTASH_set((o), gv_stashpv(pv,GV_ADD))
#define cUNOPx(o) ((UNOP*)(o))
#define cUNOP_AUXx(o) ((UNOP_AUX*)(o))
#define cBINOPx(o) ((BINOP*)(o))
#define cLISTOPx(o) ((LISTOP*)(o))
#define cLOGOPx(o) ((LOGOP*)(o))
#define cPMOPx(o) ((PMOP*)(o))
#define cSVOPx(o) ((SVOP*)(o))
#define cPADOPx(o) ((PADOP*)(o))
#define cPVOPx(o) ((PVOP*)(o))
#define cCOPx(o) ((COP*)(o))
#define cLOOPx(o) ((LOOP*)(o))
#define cMETHOPx(o) ((METHOP*)(o))
#define cUNOP cUNOPx(PL_op)
#define cUNOP_AUX cUNOP_AUXx(PL_op)
#define cBINOP cBINOPx(PL_op)
#define cLISTOP cLISTOPx(PL_op)
#define cLOGOP cLOGOPx(PL_op)
#define cPMOP cPMOPx(PL_op)
#define cSVOP cSVOPx(PL_op)
#define cPADOP cPADOPx(PL_op)
#define cPVOP cPVOPx(PL_op)
#define cCOP cCOPx(PL_op)
#define cLOOP cLOOPx(PL_op)
#define cUNOP_AUXo cUNOP_AUXx(o)
#define cBINOPo cBINOPx(o)
#define cLISTOPo cLISTOPx(o)
#define cLOGOPo cLOGOPx(o)
#define cPADOPo cPADOPx(o)
#define kUNOP cUNOPx(kid)
#define kUNOP_AUX cUNOP_AUXx(kid)
#define kBINOP cBINOPx(kid)
#define kLISTOP cLISTOPx(kid)
#define kLOGOP cLOGOPx(kid)
#define kPMOP cPMOPx(kid)
#define kSVOP cSVOPx(kid)
#define kPADOP cPADOPx(kid)
#define kPVOP cPVOPx(kid)
#define kLOOP cLOOPx(kid)
OPclass_UNOP_AUX /* 13 */