Economics Faculty Database
Economics
Arts & Sciences
Duke University


Publications of Federico Bugni

%% Working Papers   
@article{fds336353,
   Author = {Bugni, FA and Canay, IA and Shaikh, AM},
   Title = {Inference Under Covariate-Adaptive Randomization},
   Journal = {Journal of the American Statistical Association},
   Volume = {113},
   Number = {524},
   Pages = {1784-1796},
   Publisher = {Informa UK Limited},
   Year = {2018},
   Month = {October},
   url = {http://dx.doi.org/10.1080/01621459.2017.1375934},
   Abstract = {© 2018 American Statistical Association. This
             article studies inference for the average treatment effect
             in randomized controlled trials with covariate-adaptive
             randomization. Here, by covariate-adaptive randomization, we
             mean randomization schemes that first stratify according to
             baseline covariates and then assign treatment status so as
             to achieve “balance” within each stratum. Our main
             requirement is that the randomization scheme assigns
             treatment status within each stratum so that the fraction of
             units being assigned to treatment within each stratum has a
             well behaved distribution centered around a proportion π as
             the sample size tends to infinity. Such schemes include, for
             example, Efron’s biased-coin design and stratified block
             randomization. When testing the null hypothesis that the
             average treatment effect equals a prespecified value in such
             settings, we first show the usual two-sample t-test is
             conservative in the sense that it has limiting rejection
              probability under the null hypothesis no greater than, and
              typically strictly less than, the nominal level. We show,
             however, that a simple adjustment to the usual standard
             error of the two-sample t-test leads to a test that is exact
             in the sense that its limiting rejection probability under
             the null hypothesis equals the nominal level. Next, we
             consider the usual t-test (on the coefficient on treatment
             assignment) in a linear regression of outcomes on treatment
             assignment and indicators for each of the strata. We show
             that this test is exact for the important special case of
             randomization schemes with π=1/2, but is otherwise
             conservative. We again provide a simple adjustment to the
             standard errors that yields an exact test more generally.
             Finally, we study the behavior of a modified version of a
             permutation test, which we refer to as the
             covariate-adaptive permutation test, that only permutes
             treatment status for units within the same stratum. When
             applied to the usual two-sample t-statistic, we show that
             this test is exact for randomization schemes with π=1/2 and
             that additionally achieve what we refer to as “strong
             balance.” For randomization schemes with π≠1/2, this
             test may have limiting rejection probability under the null
             hypothesis strictly greater than the nominal level. When
             applied to a suitably adjusted version of the two-sample
             t-statistic, however, we show that this test is exact for
             all randomization schemes that achieve “strong balance,”
             including those with π≠1/2. A simulation study confirms
             the practical relevance of our theoretical results. We
             conclude with recommendations for empirical practice and an
             empirical illustration. Supplementary materials for this
             article are available online.},
   Doi = {10.1080/01621459.2017.1375934},
   Key = {fds336353}
}
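
The covariate-adaptive permutation test described in this abstract permutes treatment status only among units within the same stratum and recomputes the test statistic on each permutation. The following is a minimal illustrative sketch of that idea (not the authors' code): the toy data-generating process, the function names, and the use of the plain two-sample t-statistic with pi = 1/2 are assumptions made purely for illustration.

import numpy as np

def two_sample_t(y, d):
    """Usual two-sample t-statistic for the difference in means."""
    y1, y0 = y[d == 1], y[d == 0]
    se = np.sqrt(y1.var(ddof=1) / len(y1) + y0.var(ddof=1) / len(y0))
    return (y1.mean() - y0.mean()) / se

def covariate_adaptive_permutation_test(y, d, strata, n_perm=999, seed=0):
    """Permute treatment status only within each stratum and compare the
    observed statistic with its within-stratum permutation distribution."""
    rng = np.random.default_rng(seed)
    t_obs = abs(two_sample_t(y, d))
    t_perm = np.empty(n_perm)
    for b in range(n_perm):
        d_b = d.copy()
        for s in np.unique(strata):
            idx = np.where(strata == s)[0]
            d_b[idx] = rng.permutation(d_b[idx])
        t_perm[b] = abs(two_sample_t(y, d_b))
    return (1 + np.sum(t_perm >= t_obs)) / (1 + n_perm)   # permutation p-value

# Toy example: two strata, treatment assigned with pi = 1/2 within each stratum.
rng = np.random.default_rng(1)
strata = np.repeat([0, 1], 50)
d = np.concatenate([rng.permutation([0, 1] * 25), rng.permutation([0, 1] * 25)])
y = 0.5 * d + strata + rng.standard_normal(100)
print(covariate_adaptive_permutation_test(y, d, strata))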

@article{fds325923,
   Author = {Bugni, FA and Canay, IA and Shi, X},
   Title = {Inference for subvectors and other functions of partially
             identified parameters in moment inequality
             models},
   Journal = {Quantitative Economics},
   Volume = {8},
   Number = {1},
   Pages = {1-38},
   Publisher = {The Econometric Society},
   Year = {2017},
   Month = {March},
   url = {http://dx.doi.org/10.3982/QE490},
   Doi = {10.3982/QE490},
   Key = {fds325923}
}

@article{fds238049,
   Author = {Aucejo, EM and Bugni, FA and Hotz, VJ},
   Title = {Identification and inference on regressions with missing
             covariate data},
   Journal = {Econometric Theory},
   Volume = {33},
   Number = {1},
   Pages = {196-241},
   Publisher = {Cambridge University Press (CUP)},
   Year = {2017},
   Month = {February},
   ISSN = {0266-4666},
   url = {http://dx.doi.org/10.1017/S0266466615000250},
   Abstract = {© Cambridge University Press 2015. This paper examines the
             problem of identification and inference on a conditional
             moment condition model with missing data, with special focus
             on the case when the conditioning covariates are missing. We
             impose no assumption on the distribution of the missing data
             and we confront the missing data problem by using a worst
             case scenario approach. We characterize the sharp identified
             set and argue that this set is usually too complex to
             compute or to use for inference. Given this difficulty, we
             consider the construction of outer identified sets (i.e.
             supersets of the identified set) that are easier to compute
             and can still characterize the parameter of interest. Two
             different outer identification strategies are proposed. Both
             of these strategies are shown to have nontrivial identifying
             power and are relatively easy to use and combine for
             inferential purposes.},
   Doi = {10.1017/S0266466615000250},
   Key = {fds238049}
}

@article{fds238050,
   Author = {Bugni, FA and Canay, IA and Shi, X},
   Title = {Specification tests for partially identified models defined
             by moment inequalities},
   Journal = {Journal of Econometrics},
   Volume = {185},
   Number = {1},
   Pages = {259-282},
   Publisher = {Elsevier BV},
   Year = {2015},
   Month = {January},
   ISSN = {0304-4076},
   url = {http://dx.doi.org/10.1016/j.jeconom.2014.10.013},
   Abstract = {© 2014 Elsevier B.V. All rights reserved. This paper
             studies the problem of specification testing in partially
             identified models defined by moment (in)equalities. This
             problem has not been directly addressed in the literature,
             although several papers have suggested a test based on
             checking whether confidence sets for the parameters of
             interest are empty or not, referred to as Test BP. We
             propose two new specification tests, denoted Test RS and
             Test RC, that achieve uniform asymptotic size control and
             dominate Test BP in terms of power in any finite sample and
             in the asymptotic limit.},
   Doi = {10.1016/j.jeconom.2014.10.013},
   Key = {fds238050}
}

@article{fds323212,
   Author = {Bugni, FA},
   Title = {Comparison of Inferential Methods in Partially Identified
              Models in Terms of Error in Coverage Probability},
   Journal = {Econometric Theory},
   Volume = {32},
   Number = {1},
   Pages = {187-242},
   Year = {2014},
   Month = {October},
   url = {http://dx.doi.org/10.1017/S0266466614000826},
   Abstract = {Copyright © Cambridge University Press 2014. This paper
             considers the problem of coverage of the elements of the
             identified set in a class of partially identified
             econometric models with a prespecified probability. In order
             to conduct inference in partially identified econometric
             models defined by moment (in)equalities, the literature has
             proposed three methods: bootstrap, subsampling, and
             asymptotic approximation. The objective of this paper is to
             compare these methods in terms of the rate at which they
             achieve the desired coverage level, i.e., in terms of the
             rate at which the error in the coverage probability (ECP)
             converges to zero. Under certain conditions, we show that
             the ECP of the bootstrap and the ECP of the asymptotic
             approximation converge to zero at the same rate, which is a
             faster rate than that of the ECP of subsampling methods. As
             a consequence, under these conditions, the bootstrap and the
             asymptotic approximation produce inference that is more
             precise than subsampling. A Monte Carlo simulation study
             confirms that these results are relevant in finite
             samples.},
   Doi = {10.1017/S0266466614000826},
   Key = {fds323212}
}

@article{fds238052,
   Author = {Arcidiacono, P and Bayer, P and Bugni, FA and James,
             J},
   Title = {Approximating High-Dimensional Dynamic Models: Sieve Value
             Function Iteration},
   Journal = {Advances in Econometrics},
   Volume = {31},
   Pages = {45-95},
   Publisher = {Emerald Group Publishing Limited},
   Year = {2013},
   Month = {January},
   ISSN = {0731-9053},
   url = {http://dx.doi.org/10.1108/S0731-9053(2013)0000032002},
   Abstract = {Many dynamic problems in economics are characterized by
             large state spaces which make both computing and estimating
             the model infeasible. We introduce a method for
              approximating the value function of high-dimensional dynamic
             models based on sieves and establish results for the (a)
             consistency, (b) rates of convergence, and (c) bounds on the
             error of approximation. We embed this method for
             approximating the solution to the dynamic problem within an
             estimation routine and prove that it provides consistent
              estimates of the model's parameters. We provide Monte
             Carlo evidence that our method can successfully be used to
             approximate models that would otherwise be infeasible to
             compute, suggesting that these techniques may substantially
             broaden the class of models that can be solved and
             estimated. Copyright © 2013 by Emerald Group Publishing
             Limited.},
   Doi = {10.1108/S0731-9053(2013)0000032002},
   Key = {fds238052}
}
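
Sieve value function iteration replaces the exact value function with a flexible finite-dimensional approximation that is refitted after every application of the Bellman operator, so the fixed point is computed in the space of sieve coefficients rather than over the full state space. A minimal sketch of that idea on a hypothetical one-dimensional savings problem follows; the primitives, grids, and polynomial sieve are illustrative assumptions, not the paper's application.

import numpy as np

beta = 0.95                                # discount factor (assumed)
x_grid = np.linspace(0.1, 10.0, 30)        # evaluation points for the sieve fit
a_grid = np.linspace(0.0, 0.9, 20)         # choices: fraction of wealth consumed

def sieve_basis(x, degree=5):
    """Polynomial sieve basis in the normalized state."""
    z = (np.asarray(x, dtype=float) - x_grid.min()) / (x_grid.max() - x_grid.min())
    return np.column_stack([z ** k for k in range(degree + 1)])

def bellman_update(theta):
    """Apply the Bellman operator at the evaluation points, using the sieve
    approximation V(x) = sieve_basis(x) @ theta for continuation values."""
    values = np.empty_like(x_grid)
    for i, x in enumerate(x_grid):
        c = a_grid * x                     # candidate consumption levels
        x_next = (x - c) * 1.05            # deterministic wealth transition
        v_next = sieve_basis(x_next) @ theta
        values[i] = np.max(np.log(c + 1e-8) + beta * v_next)
    return values

# Iterate: refit the sieve coefficients to the updated values by least squares.
B = sieve_basis(x_grid)
theta = np.zeros(B.shape[1])
for _ in range(500):
    theta_new, *_ = np.linalg.lstsq(B, bellman_update(theta), rcond=None)
    if np.max(np.abs(theta_new - theta)) < 1e-6:
        theta = theta_new
        break
    theta = theta_new

print("approximate value at x = 5:", (sieve_basis(5.0) @ theta)[0])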

@article{fds238055,
   Author = {Bugni, FA},
   Title = {Child labor legislation: Effective, benign, both, or
             neither?},
   Journal = {Cliometrica},
   Volume = {6},
   Number = {3},
   Pages = {223-248},
   Publisher = {Springer Nature},
   Year = {2012},
   Month = {October},
   ISSN = {1863-2505},
   url = {http://dx.doi.org/10.1007/s11698-011-0073-4},
   Abstract = {This paper explores the relationship between the
             state-specific child labor legislation and the decline in
             child labor that occurred in the US between 1880 and 1900.
             The existing literature that addresses this question uses a
             difference-in-difference estimation technique. We contribute
             to this literature in two ways. First, we argue that this
             estimation technique can produce misleading results due to
             (a) the possibility of multiplicity of equilibria and (b)
             the non-linearity of the underlying econometric model.
             Second, we develop an empirical strategy to identify the
             mechanism by which the legislation affected child labor
             decisions. In particular, besides establishing whether the
             legislation was effective or not, our analysis may determine
             whether the legislation constituted a benign policy or not,
              i.e., whether the legislation constrained the behavior of
             families (not benign) or whether it changed the labor market
             to a new equilibrium in which families voluntarily respected
             the law (benign). © 2011 Springer-Verlag.},
   Doi = {10.1007/s11698-011-0073-4},
   Key = {fds238055}
}

@article{fds238056,
   Author = {Bugni, FA},
   Title = {Specification test for missing functional
             data},
   Journal = {Econometric Theory},
   Volume = {28},
   Number = {5},
   Pages = {959-1002},
   Publisher = {Cambridge University Press (CUP)},
   Year = {2012},
   Month = {October},
   ISSN = {0266-4666},
   url = {http://dx.doi.org/10.1017/S0266466612000023},
   Abstract = {Economic data are frequently generated by stochastic
             processes that can be modeled as realizations of random
             functions (functional data). This paper adapts the
             specification test for functional data developed by Bugni,
              Hall, Horowitz, and Neumann (2009, Econometrics Journal, 12,
              S1-S18) to the presence of missing observations. By using a
             worst case scenario approach, our method is able to extract
             the information available in the observed portion of the
             data while being agnostic about the nature of the missing
             observations. The presence of missing data implies that our
             test will not only result in the rejection or lack of
             rejection of the null hypothesis, but it may also be
             inconclusive. Under the null hypothesis, our specification
             test will reject the null hypothesis with a probability
             that, in the limit, does not exceed the significance level
             of the test. Moreover, the power of the test converges to
             one whenever the distribution of the observations conveys
             that the null hypothesis is false. Monte Carlo evidence
             shows that the test may produce informative results (either
             rejection or lack of rejection of the null hypothesis) even
             under the presence of significant amounts of missing data.
             The procedure is illustrated by testing whether the
              Burdett-Mortensen labor market model is the correct
             framework for wage paths constructed from the National
              Longitudinal Survey of Youth, 1979 survey. © 2012
             Cambridge University Press.},
   Doi = {10.1017/S0266466612000023},
   Key = {fds238056}
}

@article{fds238054,
   Author = {Bugni, FA and Canay, IA and Guggenberger, P},
   Title = {Distortions of Asymptotic Confidence Size in Locally
             Misspecified Moment Inequality Models},
   Journal = {Econometrica},
   Volume = {80},
   Number = {4},
   Pages = {1741-1768},
   Publisher = {The Econometric Society},
   Year = {2012},
   Month = {July},
   ISSN = {0012-9682},
   url = {http://dx.doi.org/10.3982/ECTA9604},
   Abstract = {This paper studies the behavior, under local
             misspecification, of several confidence sets (CSs) commonly
             used in the literature on inference in moment (in)equality
             models. We propose the amount of asymptotic confidence size
             distortion as a criterion to choose among competing
             inference methods. This criterion is then applied to compare
             across test statistics and critical values employed in the
             construction of CSs. We find two important results under
             weak assumptions. First, we show that CSs based on
             subsampling and generalized moment selection (Andrews and
             Soares (2010)) suffer from the same degree of asymptotic
             confidence size distortion, despite the fact that
             asymptotically the latter can lead to CSs with strictly
             smaller expected volume under correct model specification.
             Second, we show that the asymptotic confidence size of CSs
             based on the quasi-likelihood ratio test statistic can be an
              arbitrarily small fraction of the asymptotic confidence size
             of CSs based on the modified method of moments test
             statistic. © 2012 The Econometric Society.},
   Doi = {10.3982/ECTA9604},
   Key = {fds238054}
}

@article{fds238057,
   Author = {Bugni, FA},
   Title = {Bootstrap inference in partially identified models defined
             by moment inequalities: Coverage of the identified
             set},
   Journal = {Econometrica},
   Volume = {78},
   Number = {2},
   Pages = {735-753},
   Publisher = {The Econometric Society},
   Year = {2010},
   Month = {March},
   ISSN = {0012-9682},
   url = {http://dx.doi.org/10.3982/ECTA8056},
   Abstract = {This paper introduces a novel bootstrap procedure to perform
             inference in a wide class of partially identified
             econometric models. We consider econometric models defined
              by finitely many weak moment inequalities, which encompass
             many applications of economic interest. The objective of our
             inferential procedure is to cover the identified set with a
              prespecified probability. We compare our bootstrap
             procedure, a competing asymptotic approximation, and
             subsampling procedures in terms of the rate at which they
             achieve the desired coverage level, also known as the error
             in the coverage probability. Under certain conditions, we
             show that our bootstrap procedure and the asymptotic
             approximation have the same order of error in the coverage
             probability, which is smaller than that obtained by using
             subsampling. This implies that inference based on our
             bootstrap and asymptotic approximation should eventually be
             more precise than inference based on subsampling. A Monte
             Carlo study confirms this finding in a small sample
             simulation. © 2010 The Econometric Society.},
   Doi = {10.3982/ECTA8056},
   Key = {fds238057}
}
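
As rough intuition for the abstract's criterion-function approach, the sketch below bootstraps a toy interval-identified mean (lower and upper interval bounds on an outcome) and calibrates a critical value so that the resulting confidence region aims to cover the whole identified set. This is only an illustration of the idea: it omits the moment-selection refinements of the actual procedure, and the data-generating process and all names are hypothetical.

import numpy as np

def criterion(theta, yl_bar, yu_bar):
    """Sample criterion for the moment inequalities E[Y_L] <= theta <= E[Y_U]."""
    return max(yl_bar - theta, 0.0) ** 2 + max(theta - yu_bar, 0.0) ** 2

rng = np.random.default_rng(0)
n = 500
theta_true = 0.3                                   # one point in the identified set
y_lower = theta_true - rng.uniform(0.0, 1.0, n)    # lower interval bound, Y_L <= theta
y_upper = theta_true + rng.uniform(0.0, 1.0, n)    # upper interval bound, Y_U >= theta
yl_bar, yu_bar = y_lower.mean(), y_upper.mean()

# Bootstrap the criterion at the endpoints of the estimated identified set
# [yl_bar, yu_bar]; the criterion is convex in theta, so its supremum over the
# interval is attained at an endpoint.
B, alpha = 999, 0.05
sup_stats = np.empty(B)
for b in range(B):
    idx = rng.integers(0, n, n)
    yl_b, yu_b = y_lower[idx].mean(), y_upper[idx].mean()
    sup_stats[b] = n * max(criterion(yl_bar, yl_b, yu_b),
                           criterion(yu_bar, yl_b, yu_b))
c_hat = np.quantile(sup_stats, 1 - alpha)

# Confidence region: all theta whose scaled sample criterion is below the cutoff.
grid = np.linspace(yl_bar - 0.5, yu_bar + 0.5, 1001)
cs = [t for t in grid if n * criterion(t, yl_bar, yu_bar) <= c_hat]
print("estimated identified set: [%.3f, %.3f]" % (yl_bar, yu_bar))
print("confidence region:        [%.3f, %.3f]" % (cs[0], cs[-1]))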

@article{fds238053,
   Author = {Bugni, FA and Hall, P and Horowitz, JL and Neumann,
             GR},
   Title = {Goodness-of-fit tests for functional data},
   Journal = {The Econometrics Journal},
   Volume = {12},
   Number = {SUPPL. 1},
   Pages = {S1-S18},
   Year = {2009},
   Month = {July},
   ISSN = {1368-4221},
   url = {http://dx.doi.org/10.1111/j.1368-423X.2008.00266.x},
   Abstract = {Economic data are frequently generated by stochastic
             processes that can be modelled as occurring in continuous
             time. That is, the data are treated as realizations of a
             random function (functional data). Sometimes an economic
             theory model specifies the process up to a
             finite-dimensional parameter. This paper develops a test of
             the null hypothesis that a given functional data set was
             generated by a specified parametric model of a
             continuous-time process. The alternative hypothesis is
             non-parametric. A random function is a form of
             infinite-dimensional random variable, and the test presented
              here is a generalization of the familiar Cramér-von Mises test
              to an infinite-dimensional random variable. The test is
             illustrated by using it to test the hypothesis that a sample
             of wage paths was generated by a certain equilibrium job
             search model. Simulation studies show that the test has good
             finite-sample performance. © Journal compilation © 2009
             Royal Economic Society.},
   Doi = {10.1111/j.1368-423X.2008.00266.x},
   Key = {fds238053}
}
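
The test generalizes the Cramér-von Mises idea from scalar random variables to random functions. The sketch below shows one heavily simplified way such a statistic can be formed for discretely sampled paths, comparing the data with paths simulated from the fitted parametric model and obtaining a p-value by simulation under the null. It illustrates the general idea only, not the paper's exact statistic; the toy model (Brownian motion with drift) and treating the fitted parameters as known are assumptions.

import numpy as np

def simulate_paths(n_paths, n_times, mu, sigma, rng):
    """Toy parametric model: Brownian motion with drift, observed on a grid."""
    dt = 1.0 / n_times
    steps = mu * dt + sigma * np.sqrt(dt) * rng.standard_normal((n_paths, n_times))
    return steps.cumsum(axis=1)

def cvm_distance(paths_a, paths_b):
    """CvM-type distance: average over time points of the mean squared distance
    between the two empirical CDFs, evaluated at the pooled sample points."""
    total = 0.0
    for t in range(paths_a.shape[1]):
        pooled = np.sort(np.concatenate([paths_a[:, t], paths_b[:, t]]))
        cdf_a = np.searchsorted(np.sort(paths_a[:, t]), pooled, side="right") / len(paths_a)
        cdf_b = np.searchsorted(np.sort(paths_b[:, t]), pooled, side="right") / len(paths_b)
        total += np.mean((cdf_a - cdf_b) ** 2)
    return total / paths_a.shape[1]

rng = np.random.default_rng(0)
n, T = 200, 50
data = simulate_paths(n, T, mu=0.5, sigma=1.0, rng=rng)        # "observed" paths

# Null hypothesis: the data were generated by the parametric model with
# parameters mu_hat, sigma_hat (estimation error is ignored in this sketch).
mu_hat, sigma_hat = 0.5, 1.0
stat = cvm_distance(data, simulate_paths(5 * n, T, mu_hat, sigma_hat, rng))

# Simulated null distribution: redraw the "data" from the fitted model.
null_stats = [cvm_distance(simulate_paths(n, T, mu_hat, sigma_hat, rng),
                           simulate_paths(5 * n, T, mu_hat, sigma_hat, rng))
              for _ in range(199)]
p_value = (1 + sum(s >= stat for s in null_stats)) / (1 + len(null_stats))
print("CvM-type statistic: %.5f, simulated p-value: %.3f" % (stat, p_value))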

